├── aiosql
├── py.typed
├── adapters
│ ├── pg8000.py
│ ├── apyformat.py
│ ├── __init__.py
│ ├── sqlite3.py
│ ├── pyformat.py
│ ├── mysql.py
│ ├── aiosqlite.py
│ ├── ageneric.py
│ ├── duckdb.py
│ ├── generic.py
│ └── asyncpg.py
├── __init__.py
├── utils.py
├── types.py
├── aiosql.py
└── query_loader.py
├── .dockerignore
├── tests
├── blogdb
│ ├── sql
│ │ ├── empty.sql
│ │ ├── blogs
│ │ │ ├── blogs.oops
│ │ │ ├── pg
│ │ │ │ ├── asyncpg
│ │ │ │ │ └── blogs.sql
│ │ │ │ ├── pg8000
│ │ │ │ │ └── blogs.sql
│ │ │ │ └── blogs.sql
│ │ │ ├── my
│ │ │ │ └── blogs.sql
│ │ │ ├── du
│ │ │ │ └── blogs.sql
│ │ │ ├── ms
│ │ │ │ └── blogs.sql
│ │ │ ├── li
│ │ │ │ └── blogs.sql
│ │ │ └── blogs.sql
│ │ ├── misc
│ │ │ ├── du
│ │ │ │ └── misc.sql
│ │ │ ├── li
│ │ │ │ └── misc.sql
│ │ │ ├── my
│ │ │ │ ├── pymysql
│ │ │ │ │ └── misc.sql
│ │ │ │ └── misc.sql
│ │ │ ├── pg
│ │ │ │ ├── pg8000
│ │ │ │ │ └── misc.sql
│ │ │ │ └── misc.sql
│ │ │ ├── ms
│ │ │ │ └── misc.sql
│ │ │ └── misc.sql
│ │ ├── users
│ │ │ ├── li
│ │ │ │ └── users.sql
│ │ │ ├── pg
│ │ │ │ ├── asyncpg
│ │ │ │ │ └── users.sql
│ │ │ │ └── pg8000
│ │ │ │ │ └── users.sql
│ │ │ ├── du
│ │ │ │ └── users.sql
│ │ │ ├── ms
│ │ │ │ └── users.sql
│ │ │ └── users.sql
│ │ └── comments
│ │ │ ├── pg
│ │ │ └── comments.sql
│ │ │ ├── li
│ │ │ └── comments.sql
│ │ │ ├── ms
│ │ │ └── comments.sql
│ │ │ ├── my
│ │ │ └── comments.sql
│ │ │ └── du
│ │ │ └── comments.sql
│ └── data
│ │ ├── users_data.csv
│ │ └── blogs_data.csv
├── Makefile
├── wait.py
├── pytest.ini
├── conf_sqlite.py
├── conf_duckdb.py
├── test_sqlite3.py
├── test_duckdb.py
├── test_aiosqlite.py
├── test_pymssql.py
├── test_myco.py
├── test_mysqldb.py
├── test_psycopg2.py
├── test_pygresql.py
├── conftest.py
├── test_pymysql.py
├── test_pg8000.py
├── test_psycopg3.py
├── test_mariadb.py
├── utils.py
├── test_apsycopg3.py
├── test_apsw.py
├── conf_pgsql.py
├── conf_schema.py
├── conf_mysql.py
├── conf_mssql.py
├── test_patterns.py
├── test_asyncpg.py
└── test_loading.py
├── README.rst
├── docker
├── .gitignore
├── dockerfile.python-postgres
├── dockerfile.python-mysql
├── dockerfile.python-dbs
├── Makefile
├── docker-compose.yml
└── README.md
├── MANIFEST.in
├── .github
├── scripts
│ ├── package-build.sh
│ └── docs.sh
└── workflows
│ ├── deploy-pages.yml
│ └── aiosql-package.yml
├── .flake8
├── example
├── sql
│ ├── users
│ │ └── users.sql
│ ├── create_schema.sql
│ └── blogs
│ │ └── blogs.sql
├── greetings.sql
├── greetings.py
├── greetings.sh
├── greetings_async.py
├── greetings_create.sql
├── observe_query.py
├── greetings_cursor.py
├── pg_execute_values.py
└── example.py
├── .editorconfig
├── docs
└── source
│ ├── todo.rst
│ ├── advanced-topics.rst
│ ├── conf.py
│ ├── contributing.rst
│ ├── database-driver-adapters.rst
│ ├── defining-sql-queries.rst
│ ├── versions.rst
│ ├── index.rst
│ └── getting-started.rst
├── LICENSE
├── .gitignore
└── pyproject.toml

/aiosql/py.typed:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | .gitignore
--------------------------------------------------------------------------------
/tests/blogdb/sql/empty.sql:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/README.rst:
-------------------------------------------------------------------------------- 1 | docs/source/index.rst -------------------------------------------------------------------------------- /docker/.gitignore: -------------------------------------------------------------------------------- 1 | .docker-aiosql-* 2 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include docs * 2 | -------------------------------------------------------------------------------- /tests/Makefile: -------------------------------------------------------------------------------- 1 | clean: 2 | $(RM) -r __pycache__ .pytest_cache 3 | -------------------------------------------------------------------------------- /tests/blogdb/sql/blogs/blogs.oops: -------------------------------------------------------------------------------- 1 | This file should be ignored! 2 | -------------------------------------------------------------------------------- /tests/blogdb/data/users_data.csv: -------------------------------------------------------------------------------- 1 | bobsmith,Bob,Smith 2 | johndoe,John,Doe 3 | janedoe,Jane,Doe 4 | -------------------------------------------------------------------------------- /tests/blogdb/sql/misc/du/misc.sql: -------------------------------------------------------------------------------- 1 | -- name: get_now_date_time()$ 2 | select strftime(now(),'%Y-%m-%d %H:%M:%S') AS now; 3 | -------------------------------------------------------------------------------- /tests/blogdb/sql/misc/li/misc.sql: -------------------------------------------------------------------------------- 1 | -- name: get_now_date_time()$ 2 | SELECT strftime('%Y-%m-%d %H:%M:%S', datetime()) AS now; 3 | -------------------------------------------------------------------------------- /tests/blogdb/sql/users/li/users.sql: -------------------------------------------------------------------------------- 1 | -- name: add_many_users*! 2 | INSERT INTO users(username, firstname, lastname) VALUES (?, ?, ?); 3 | -------------------------------------------------------------------------------- /.github/scripts/package-build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | 4 | python -m build --sdist 5 | python -m build --wheel 6 | -------------------------------------------------------------------------------- /tests/blogdb/sql/users/pg/asyncpg/users.sql: -------------------------------------------------------------------------------- 1 | -- name: add_many_users*! 2 | INSERT INTO users(username, firstname, lastname) 3 | VALUES (:name, :fname, :lname); 4 | -------------------------------------------------------------------------------- /tests/blogdb/sql/users/pg/pg8000/users.sql: -------------------------------------------------------------------------------- 1 | -- name: add_many_users*! 2 | INSERT INTO users(username, firstname, lastname) 3 | VALUES (:name, :fname, :lname); 4 | -------------------------------------------------------------------------------- /tests/blogdb/sql/users/du/users.sql: -------------------------------------------------------------------------------- 1 | -- name: add_many_users*! 
2 | INSERT INTO users(userid, username, firstname, lastname) 3 | VALUES (NEXTVAL('users_seq'), ?, ?, ?); 4 | -------------------------------------------------------------------------------- /tests/blogdb/sql/misc/my/pymysql/misc.sql: -------------------------------------------------------------------------------- 1 | -- name: get-modulo(numerator, denominator)$ 2 | -- escaped percent modulo operator 3 | SELECT :numerator %% :denominator AS modulo; 4 | 5 | -------------------------------------------------------------------------------- /tests/blogdb/sql/blogs/pg/asyncpg/blogs.sql: -------------------------------------------------------------------------------- 1 | -- name: add_many_blogs*! 2 | INSERT INTO blogs (userid, title, content, published) 3 | VALUES (:userid, :title, :content, :published); 4 | -------------------------------------------------------------------------------- /tests/blogdb/sql/blogs/pg/pg8000/blogs.sql: -------------------------------------------------------------------------------- 1 | -- name: add_many_blogs*! 2 | INSERT INTO blogs (userid, title, content, published) 3 | VALUES (:userid, :title, :content, :published); 4 | -------------------------------------------------------------------------------- /tests/blogdb/sql/misc/pg/pg8000/misc.sql: -------------------------------------------------------------------------------- 1 | -- name: get-modulo(numerator, denominator)$ 2 | -- no-escaped percent modulo operator 3 | SELECT :numerator::INT8 % :denominator::INT8 AS modulo; 4 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | # Derived from the one from Black itself. Ignore rules that are handled by Black. 3 | ignore = E203, E266, E501, W503, E704 4 | select = B,C,E,F,W,T4,B9 5 | max-line-length = 100 6 | -------------------------------------------------------------------------------- /tests/blogdb/sql/misc/my/misc.sql: -------------------------------------------------------------------------------- 1 | -- name: get_now_date_time()$ 2 | SELECT date_format(NOW(), '%Y-%m-%d %H:%i:%S') AS now; 3 | 4 | -- name: escape-quotes$ 5 | -- SELECT 'L\'art du rire' AS escaped; 6 | SELECT 1; 7 | -------------------------------------------------------------------------------- /aiosql/adapters/pg8000.py: -------------------------------------------------------------------------------- 1 | from .generic import GenericAdapter 2 | 3 | 4 | class Pg8000Adapter(GenericAdapter): 5 | def _cursor(self, conn): 6 | import pg8000 7 | 8 | return pg8000.Cursor(conn, paramstyle="named") 9 | -------------------------------------------------------------------------------- /tests/blogdb/data/blogs_data.csv: -------------------------------------------------------------------------------- 1 | 1,What I did Today,"I mowed the lawn - washed some clothes - ate a burger.",2017-07-28 2 | 3,Testing,Is this thing on?,2018-01-01 3 | 1,How to make a pie.,"1. Make crust\n2. Fill\n3. 
Bake\n4.Eat",2018-11-23 4 | -------------------------------------------------------------------------------- /tests/blogdb/sql/comments/pg/comments.sql: -------------------------------------------------------------------------------- 1 | -- name: create-table# 2 | create table comments ( 3 | commentid serial primary key, 4 | blogid integer not null references blogs(blogid), 5 | author text not null, 6 | content text not null 7 | ) 8 | -------------------------------------------------------------------------------- /example/sql/users/users.sql: -------------------------------------------------------------------------------- 1 | -- name: insert_many*! 2 | insert into users(username, firstname, lastname) values (?, ?, ?); 3 | 4 | 5 | -- name: get_all 6 | select userid, 7 | username, 8 | firstname, 9 | lastname 10 | from users; 11 | -------------------------------------------------------------------------------- /tests/blogdb/sql/comments/li/comments.sql: -------------------------------------------------------------------------------- 1 | -- name: create-table# 2 | create table comments ( 3 | commentid integer primary key, 4 | blogid integer not null, 5 | author text not null, 6 | content text not null, 7 | foreign key(blogid) references blogs(blogid) 8 | ); 9 | -------------------------------------------------------------------------------- /tests/blogdb/sql/comments/ms/comments.sql: -------------------------------------------------------------------------------- 1 | -- name: create-table# 2 | create table comments ( 3 | commentid integer identity(1, 1) primary key, 4 | blogid integer not null references blogs(blogid), 5 | author varchar(255) not null, 6 | content varchar(max) not null 7 | ) 8 | -------------------------------------------------------------------------------- /tests/blogdb/sql/misc/ms/misc.sql: -------------------------------------------------------------------------------- 1 | -- name: get_now_date_time()$ 2 | SELECT CONVERT(VARCHAR, CURRENT_TIMESTAMP, 120) AS now; 3 | 4 | -- name: empty()$ 5 | SELECT 'hello' AS message WHERE 0 = 1; 6 | 7 | -- name: get-modulo(numerator, denominator)$ 8 | SELECT :numerator % :denominator AS modulo; 9 | -------------------------------------------------------------------------------- /tests/blogdb/sql/comments/my/comments.sql: -------------------------------------------------------------------------------- 1 | -- name: create-table# 2 | create table comments ( 3 | commentid integer primary key auto_increment, 4 | blogid integer not null, 5 | author text not null, 6 | content text not null, 7 | foreign key(blogid) references blogs(blogid) 8 | ); 9 | -------------------------------------------------------------------------------- /tests/wait.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | 3 | import sys 4 | import utils as u 5 | 6 | assert len(sys.argv) == 4, f"usage: {sys.argv[0]} host port tries" 7 | 8 | host, port, tries = sys.argv[1], int(sys.argv[2]), int(sys.argv[3]) 9 | 10 | sys.exit(0 if u.has_service(host, port, tries) else 1) 11 | -------------------------------------------------------------------------------- /aiosql/__init__.py: -------------------------------------------------------------------------------- 1 | from .aiosql import from_path, from_str, register_adapter 2 | from .utils import SQLParseException, SQLLoadException 3 | from importlib.metadata import version 4 | 5 | __version__ = version("aiosql") 6 | 7 | __all__ = ["from_path", "from_str", "register_adapter", "SQLParseException", "SQLLoadException"] 8 | -------------------------------------------------------------------------------- /tests/blogdb/sql/comments/du/comments.sql: -------------------------------------------------------------------------------- 1 | -- name: create-table# 2 | create sequence comments_seq; 3 | create table comments ( 4 | commentid integer primary key, 5 | blogid integer not null, 6 | author text not null, 7 | content text not null, 8 | foreign key(blogid) references blogs(blogid) 9 | ); 10 | -------------------------------------------------------------------------------- /example/greetings.sql: -------------------------------------------------------------------------------- 1 | -- name: get_all_greetings 2 | -- Get all the greetings in the database 3 | select greeting_id, greeting 4 | from greetings 5 | order by 1; 6 | 7 | -- name: get_user_by_username^ 8 | -- Get a user from the database using a named parameter 9 | select user_id, username, name 10 | from users 11 | where username = :username; 12 | -------------------------------------------------------------------------------- /tests/blogdb/sql/misc/pg/misc.sql: -------------------------------------------------------------------------------- 1 | -- name: get_now_date_time()$ 2 | SELECT to_char(NOW(), 'YYYY-MM-DD HH24:MI:SS') AS now; 3 | 4 | -- name: escape-simple-quotes()$ 5 | SELECT '''doubled'' single quotes' as """doubled"" double quotes" 6 | 7 | -- name: get-modulo(numerator, denominator)$ 8 | -- %-escaped percent modulo operator 9 | SELECT :numerator %% :denominator AS modulo; 10 | -------------------------------------------------------------------------------- /tests/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | asyncio_default_fixture_loop_scope = function 3 | # asyncio: asynchronous tests 4 | markers = 5 | sqlite3: tests relying on sqlite 6 | postgres: tests relying on postgres 7 | mariadb: tests relying on mariadb 8 | mssql: tests relying on mssql 9 | mysql: tests relying on mysql 10 | duckdb: tests relying on duckdb 11 | misc: other tests 12 | -------------------------------------------------------------------------------- /aiosql/adapters/apyformat.py: -------------------------------------------------------------------------------- 1 | from .ageneric import AsyncGenericAdapter 2 | from .pyformat import _replacer 3 | from ..utils import VAR_REF 4 | 5 | 6 | class AsyncPyFormatAdapter(AsyncGenericAdapter): 7 | """Convert from named to pyformat parameter style.""" 8 | 9 | def process_sql(self, query_name, op_type, sql): 10 | """From named to pyformat.""" 11 | return VAR_REF.sub(_replacer, sql) 12 | -------------------------------------------------------------------------------- /aiosql/adapters/__init__.py: 
-------------------------------------------------------------------------------- 1 | # standard adapters 2 | from .pyformat import PyFormatAdapter 3 | from .generic import GenericAdapter 4 | from .sqlite3 import SQLite3Adapter 5 | 6 | # async adapters 7 | from .aiosqlite import AioSQLiteAdapter 8 | from .asyncpg import AsyncPGAdapter 9 | 10 | # silence flake8 F401 warning: 11 | _ALL = [ 12 | PyFormatAdapter, 13 | GenericAdapter, 14 | SQLite3Adapter, 15 | AioSQLiteAdapter, 16 | AsyncPGAdapter, 17 | ] 18 | -------------------------------------------------------------------------------- /tests/blogdb/sql/users/ms/users.sql: -------------------------------------------------------------------------------- 1 | -- name: get-all 2 | -- MS SQL Server does not do an implicit "AS" on * 3 | select 4 | userid as userid, 5 | username as username, 6 | firstname as firstname, 7 | lastname as lastname 8 | from users 9 | order by 1; 10 | 11 | -- name: get-by-username^ 12 | select userid as userid, 13 | username as username, 14 | firstname as firstname, 15 | lastname as lastname 16 | from users 17 | where username = :username; 18 | -------------------------------------------------------------------------------- /tests/conf_sqlite.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pathlib import Path 3 | from conf_schema import create_user_blogs, fill_user_blogs, drop_user_blogs 4 | import utils 5 | 6 | @pytest.fixture 7 | def li_dbpath(tmpdir): 8 | db_path = str(Path(tmpdir.strpath) / "blogdb.db") 9 | yield db_path 10 | 11 | @pytest.fixture 12 | def li_db(rconn, queries): 13 | create_user_blogs(rconn, queries) 14 | fill_user_blogs(rconn, queries) 15 | yield rconn 16 | drop_user_blogs(rconn, queries) 17 | -------------------------------------------------------------------------------- /example/sql/create_schema.sql: -------------------------------------------------------------------------------- 1 | -- name: create-schema# 2 | create table users ( 3 | userid integer not null primary key, 4 | username text not null, 5 | firstname integer not null, 6 | lastname text not null 7 | ); 8 | 9 | create table blogs ( 10 | blogid integer not null primary key, 11 | userid integer not null, 12 | title text not null, 13 | content text not null, 14 | published date not null default CURRENT_DATE, 15 | foreign key(userid) references users(userid) 16 | ); 17 | -------------------------------------------------------------------------------- /example/greetings.py: -------------------------------------------------------------------------------- 1 | import aiosql 2 | import sqlite3 3 | 4 | queries = aiosql.from_path("greetings.sql", "sqlite3") 5 | 6 | with sqlite3.connect("greetings.db") as conn: 7 | user = queries.get_user_by_username(conn, username="willvaughn") 8 | # user: (1, "willvaughn", "William") 9 | 10 | for _, greeting in queries.get_all_greetings(conn): 11 | # scan: (1, "Hi"), (2, "Aloha"), (3, "Hola"), … 12 | print(f"{greeting}, {user[2]}!") 13 | # Hi, William! 14 | # Aloha, William! 
15 | # … 16 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # Use as the top-most EditorConfig file 2 | root = true 3 | 4 | # Unix-style newlines with a newline ending every file 5 | [*] 6 | end_of_line = lf 7 | insert_final_newline = true 8 | trim_trailing_whitespace = true 9 | 10 | [*.py] 11 | charset = utf-8 12 | indent_style = space 13 | indent_size = 4 14 | 15 | [*.{md,markdown}] 16 | indent_style = space 17 | indent_size = 4 18 | trim_trailing_whitespace = false 19 | 20 | # 2 space indentation 21 | [*.{json,yml,sh}] 22 | indent_style = space 23 | indent_size = 2 24 | -------------------------------------------------------------------------------- /example/greetings.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | echo "# creating sqlite greetings database…" 4 | sqlite3 greetings.db < greetings_create.sql 5 | 6 | echo "# running standard aiosql example code…" 7 | python greetings.py 8 | 9 | echo "# running async aiosql example code…" 10 | python greetings_async.py 11 | 12 | echo "# running cursor aiosql example code…" 13 | python greetings_cursor.py 14 | 15 | echo "# running execute values and observer…" 16 | python observe_query.py 17 | 18 | echo "# removing greetings database." 19 | rm greetings.db 20 | -------------------------------------------------------------------------------- /docker/dockerfile.python-postgres: -------------------------------------------------------------------------------- 1 | FROM ubuntu 2 | LABEL description="Python setup for AioSQL Postgres testing" 3 | RUN apt update 4 | RUN apt install -y --no-install-recommends \ 5 | python-is-python3 python3-venv python3-dev gcc make curl pkg-config 6 | RUN python -m venv /venv 7 | RUN /venv/bin/pip install asyncio pytest pytest-asyncio coverage 8 | WORKDIR /code 9 | # postgres specific packages 10 | RUN apt install -y --no-install-recommends libpq-dev 11 | RUN apt clean 12 | RUN /venv/bin/pip install psycopg psycopg2 pygresql pg8000 asyncpg pytest-postgresql 13 | -------------------------------------------------------------------------------- /docker/dockerfile.python-mysql: -------------------------------------------------------------------------------- 1 | FROM ubuntu 2 | LABEL description="Python setup for AioSQL MySQL testing" 3 | RUN apt update 4 | RUN apt install -y --no-install-recommends \ 5 | python-is-python3 python3-venv python3-dev gcc make curl pkg-config 6 | RUN python -m venv /venv 7 | RUN /venv/bin/pip install asyncio pytest pytest-asyncio coverage 8 | WORKDIR /code 9 | # mysql specific packages 10 | RUN apt install -y --no-install-recommends libmysqlclient-dev 11 | RUN apt clean 12 | RUN /venv/bin/pip install cryptography mysqlclient mysql-connector-python pymysql pytest-mysql 13 | -------------------------------------------------------------------------------- /example/greetings_async.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import aiosql 3 | import aiosqlite 4 | 5 | queries = aiosql.from_path("greetings.sql", "aiosqlite") 6 | 7 | async def main(): 8 | # Parallel queries!!! 
9 | async with aiosqlite.connect("greetings.db") as conn: 10 | greetings, user = await asyncio.gather( 11 | queries.get_all_greetings(conn), 12 | queries.get_user_by_username(conn, username="willvaughn") 13 | ) 14 | 15 | for _, greeting in greetings: 16 | print(f"{greeting}, {user[2]}!") 17 | 18 | asyncio.run(main()) 19 | -------------------------------------------------------------------------------- /example/greetings_create.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS greetings; 2 | DROP TABLE IF EXISTS users; 3 | 4 | CREATE TABLE greetings( 5 | greeting_id INTEGER PRIMARY KEY, 6 | greeting TEXT NOT NULL 7 | ); 8 | 9 | INSERT INTO greetings(greeting_id, greeting) VALUES 10 | (1, 'Hi'), 11 | (2, 'Aloha'), 12 | (3, 'Hola'), 13 | (4, 'Bonjour'), 14 | (5, '你好'); 15 | 16 | CREATE TABLE users( 17 | user_id INTEGER PRIMARY KEY, 18 | username TEXT UNIQUE NOT NULL, 19 | name TEXT NOT NULL 20 | ); 21 | 22 | INSERT INTO users(user_id, username, name) VALUES 23 | (1, 'willvaughn', 'William'), 24 | (2, 'calvin', 'Fabien'); 25 | -------------------------------------------------------------------------------- /aiosql/adapters/sqlite3.py: -------------------------------------------------------------------------------- 1 | from .generic import GenericAdapter 2 | 3 | 4 | class SQLite3Adapter(GenericAdapter): 5 | """SQLite3 Adapter for AioSQL. 6 | 7 | Overwrites two methods using sqlite3-specific non-standard methods. 8 | """ 9 | 10 | def insert_returning(self, conn, query_name, sql, parameters): 11 | cur = self._cursor(conn) 12 | try: 13 | cur.execute(sql, parameters) 14 | results = cur.lastrowid 15 | finally: 16 | cur.close() 17 | return results 18 | 19 | def execute_script(self, conn, sql): 20 | conn.executescript(sql) 21 | return "DONE" 22 | -------------------------------------------------------------------------------- /tests/conf_duckdb.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pathlib import Path 3 | from conf_schema import create_user_blogs, fill_user_blogs, drop_user_blogs 4 | 5 | try: 6 | import duckdb 7 | 8 | @pytest.fixture 9 | def duckdb_conn(queries): 10 | # db_path = str(Path(tmpdir.strpath) / "blogdb.duck.db") 11 | conn = duckdb.connect(":memory:") 12 | create_user_blogs(conn, queries) 13 | fill_user_blogs(conn, queries) 14 | yield conn 15 | drop_user_blogs(conn, queries) 16 | 17 | except ModuleNotFoundError: 18 | 19 | @pytest.fixture 20 | def duckdb_conn(): 21 | raise Exception("unimplemented fixture") 22 | -------------------------------------------------------------------------------- /aiosql/adapters/pyformat.py: -------------------------------------------------------------------------------- 1 | from .generic import GenericAdapter 2 | from ..utils import VAR_REF 3 | 4 | 5 | def _replacer(ma): 6 | """Regex hook for named to pyformat conversion.""" 7 | gd = ma.groupdict() 8 | if gd["dquote"] is not None: # "..." 9 | return gd["dquote"] 10 | elif gd["squote"] is not None: # '...' 
11 | return gd["squote"] 12 | else: # :something to %(something)s 13 | return f'{gd["lead"]}%({gd["var_name"]})s' 14 | 15 | 16 | class PyFormatAdapter(GenericAdapter): 17 | """Convert from named to pyformat parameter style.""" 18 | 19 | def process_sql(self, query_name, op_type, sql): 20 | """From named to pyformat.""" 21 | return VAR_REF.sub(_replacer, sql) 22 | -------------------------------------------------------------------------------- /.github/scripts/docs.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -euo pipefail 4 | 5 | # add sphinx specific entries when generating the doc so that pypi will not complain 6 | 7 | cat >> docs/source/index.rst < 18 | Defining SQL Queries 19 | Advanced Topics 20 | Database Driver Adapters 21 | Contributing 22 | API 23 | Versions 24 | Backlog 25 | EOF 26 | 27 | sphinx-apidoc -f -o docs/source/pydoc aiosql 28 | sphinx-build -b html docs/source docs/build 29 | -------------------------------------------------------------------------------- /example/sql/blogs/blogs.sql: -------------------------------------------------------------------------------- 1 | -- name: get_all_blogs 2 | select blogid, 3 | userid, 4 | title, 5 | content, 6 | published 7 | from blogs; 8 | 9 | -- name: publish_blog`_ Clojure library as support for multiple 14 | kind of substitutions, maybe we could do the same. 15 | 16 | For instance for identifiers: 17 | 18 | .. code:: sql 19 | 20 | -- name: select(cols, table) 21 | SELECT :i*:cols FROM :i:table ORDER BY 1; 22 | 23 | .. code:: python 24 | 25 | res = db.select(conn, cols=["uid", "name"], table="users") 26 | 27 | This would require separating identifiers management and to build 28 | and memoize the query variants? 29 | How much help for is there from drivers? 30 | -------------------------------------------------------------------------------- /aiosql/utils.py: -------------------------------------------------------------------------------- 1 | import re 2 | import logging 3 | 4 | # FIXME to be improved 5 | VAR_REF = re.compile( 6 | # NOTE probably pg specific? 
7 | r'(?P"(""|[^"])+")|' 8 | # FIXME mysql/mariadb use backslash escapes 9 | r"(?P\'(\'\'|[^\'])*\')|" 10 | # NOTE beware of overlapping re 11 | r"(?P[^:]):(?P\w+)(?=[^:]?)" 12 | ) 13 | """Pattern to identify colon-variables (aka _named_ style) in SQL code""" 14 | 15 | # NOTE see comments above 16 | VAR_REF_DOT = re.compile( 17 | r'(?P"(""|[^"])+")|' 18 | r"(?P\'(\'\'|[^\'])*\')|" 19 | r"(?P[^:]):(?P\w+\.\w+)(?=[^:]?)" 20 | ) 21 | """Pattern to identify colon-variables with a simple attribute in SQL code.""" 22 | 23 | log = logging.getLogger("aiosql") 24 | """Shared package logging.""" 25 | # log.setLevel(logging.DEBUG) 26 | 27 | 28 | class SQLLoadException(Exception): 29 | """Raised when there is a problem loading SQL content from a file or directory""" 30 | 31 | pass 32 | 33 | 34 | class SQLParseException(Exception): 35 | """Raised when there was a problem parsing the aiosql comment annotations in SQL""" 36 | 37 | pass 38 | -------------------------------------------------------------------------------- /tests/blogdb/sql/blogs/my/blogs.sql: -------------------------------------------------------------------------------- 1 | -- name: create-table-users# 2 | CREATE TABLE IF NOT EXISTS users( 3 | userid INTEGER auto_increment PRIMARY KEY, 4 | username TEXT NOT NULL, 5 | firstname TEXT NOT NULL, 6 | lastname TEXT NOT NULL 7 | ); 8 | 9 | -- name: create-table-blogs# 10 | CREATE TABLE IF NOT EXISTS blogs( 11 | blogid INTEGER auto_increment PRIMARY KEY, 12 | userid INTEGER NOT NULL REFERENCES users, 13 | title TEXT NOT NULL, 14 | content TEXT NOT NULL, 15 | published DATE NOT NULL DEFAULT (CURRENT_DATE) 16 | ); 17 | 18 | -- name: get-blogs-published-after 19 | -- Get all blogs by all authors published after the given date. 20 | select b.title, 21 | u.username, 22 | DATE_FORMAT(b.published, '%Y-%m-%d %H:%i') as published 23 | from blogs b 24 | inner join users u on b.userid = u.userid 25 | where b.published >= :published 26 | order by b.published desc; 27 | 28 | 29 | -- name: bulk-publish*! 30 | -- Insert many blogs at once 31 | insert into blogs ( 32 | userid, 33 | title, 34 | content, 35 | published 36 | ) 37 | values (%s, %s, %s, %s); 38 | 39 | -- name: publish-new-blog 40 | insert into blogs (userid, title, content) 41 | values (:userid, :title, :contents) 42 | returning blogid, title; 43 | -------------------------------------------------------------------------------- /tests/blogdb/sql/blogs/pg/blogs.sql: -------------------------------------------------------------------------------- 1 | -- name: create-table-users# 2 | CREATE TABLE IF NOT EXISTS users( 3 | userid SERIAL PRIMARY KEY, 4 | username TEXT NOT NULL, 5 | firstname TEXT NOT NULL, 6 | lastname TEXT NOT NULL 7 | ); 8 | 9 | -- name: create-table-blogs# 10 | CREATE TABLE IF NOT EXISTS blogs( 11 | blogid SERIAL PRIMARY KEY, 12 | userid INTEGER NOT NULL REFERENCES users, 13 | title TEXT NOT NULL, 14 | content TEXT NOT NULL, 15 | published DATE NOT NULL DEFAULT CURRENT_DATE 16 | ); 17 | 18 | -- name: get-blogs-published-after 19 | -- Get all blogs by all authors published after the given date. 20 | select title, 21 | username, 22 | to_char(published, 'YYYY-MM-DD HH24:MI') as "published" 23 | from blogs 24 | join users using(userid) 25 | where published >= :published 26 | order by published desc; 27 | 28 | 29 | -- name: publish-blog= :published 28 | order by b.published desc; 29 | 30 | 31 | -- name: bulk-publish*! 
32 | -- Insert many blogs at once 33 | insert into blogs ( 34 | blogid, 35 | userid, 36 | title, 37 | content, 38 | published 39 | ) 40 | values (nextval('blogs_seq'), ?, ?, ?, ?); 41 | 42 | -- name: publish-blog&2 ; exit 1 ; } 38 | echo "# starting up…" 39 | $(DOCKER) compose up -d 40 | # wait and show results, in probable completion order 41 | # $(DOCKER) container logs -f $(NAME)-dbs-client-1 42 | $(DOCKER) container wait $(NAME)-dbs-client-1 43 | $(DOCKER) container logs $(NAME)-dbs-client-1 44 | # $(DOCKER) container logs -f $(NAME)-my-client-1 45 | $(DOCKER) container wait $(NAME)-my-client-1 46 | $(DOCKER) container logs $(NAME)-my-client-1 47 | # $(DOCKER) container logs -f $(NAME)-pg-client-1 48 | $(DOCKER) container wait $(NAME)-pg-client-1 49 | $(DOCKER) container logs $(NAME)-pg-client-1 50 | echo "# shutting down…" 51 | $(DOCKER) compose down -v 52 | 53 | .PHONY: docker.pytest 54 | docker.pytest: 55 | export TEST=pytest 56 | $(MAKE) docker.run 57 | 58 | .PHONY: docker.coverage 59 | docker.coverage: 60 | export TEST=coverage 61 | $(MAKE) docker.run 62 | -------------------------------------------------------------------------------- /tests/test_myco.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import aiosql 3 | import pytest 4 | import run_tests as t 5 | import utils as u 6 | 7 | try: 8 | import mysql.connector as db 9 | except ModuleNotFoundError: 10 | pytest.skip("missing driver: mysql.connector (mysql-connector)", allow_module_level=True) 11 | 12 | pytestmark = [ 13 | pytest.mark.mysql, 14 | pytest.mark.skipif(not u.has_pkg("pytest_mysql"), reason="no pytest_mysql"), 15 | ] 16 | 17 | @pytest.fixture(scope="module") 18 | def driver(): 19 | return "mysql-connector" 20 | 21 | @pytest.fixture(scope="module") 22 | def date(): 23 | return datetime.date 24 | 25 | @pytest.fixture 26 | def conn(my_db): 27 | return my_db 28 | 29 | def test_my_dsn(my_dsn): 30 | assert "user" in my_dsn and "host" in my_dsn and "port" in my_dsn 31 | 32 | def test_my_conn(conn): 33 | assert conn.__module__.startswith(db.__name__) 34 | t.run_something(conn) 35 | 36 | from run_tests import ( 37 | run_sanity as test_sanity, 38 | run_something as test_something, 39 | run_cursor as test_cursor, 40 | # run_record_query as test_record_query, 41 | # run_parameterized_record_query as test_parameterized_record_query, 42 | run_parameterized_query as test_parameterized_query, 43 | run_record_class_query as test_record_class_query, 44 | run_select_cursor_context_manager as test_select_cursor_context_manager, 45 | run_select_one as test_select_one, 46 | # run_insert_returning as test_insert_returning, 47 | run_delete as test_delete, 48 | run_insert_many as test_insert_many, 49 | run_select_value as test_select_value, 50 | run_date_time as test_date_time, 51 | run_object_attributes as test_object_attributes, 52 | run_execute_script as test_execute_script, 53 | run_modulo as test_modulo, 54 | ) 55 | -------------------------------------------------------------------------------- /tests/test_mysqldb.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import aiosql 3 | import pytest 4 | import run_tests as t 5 | import utils as u 6 | 7 | try: 8 | import MySQLdb as db 9 | except ModuleNotFoundError: 10 | pytest.skip("missing driver: MySQLdb (mysqlclient)", allow_module_level=True) 11 | 12 | pytestmark = [ 13 | pytest.mark.mysql, 14 | pytest.mark.skipif(not u.has_pkg("pytest_mysql"), reason="no 
pytest_mysql"), 15 | ] 16 | 17 | @pytest.fixture(scope="module") 18 | def driver(): 19 | return "mysqldb" 20 | 21 | @pytest.fixture(scope="module") 22 | def date(): 23 | return datetime.date 24 | 25 | @pytest.fixture 26 | def conn(my_db): 27 | return my_db 28 | 29 | def test_my_dsn(my_dsn): 30 | assert "user" in my_dsn and "host" in my_dsn and "port" in my_dsn 31 | 32 | def test_my_conn(conn): 33 | assert conn.__module__.startswith(db.__name__) 34 | t.run_something(conn) 35 | 36 | from run_tests import ( 37 | run_sanity as test_sanity, 38 | run_something as test_something, 39 | run_cursor as test_cursor, 40 | # FIXME 41 | # run_record_query as test_record_query, 42 | # run_parameterized_record_query as test_parameterized_record_query, 43 | run_parameterized_query as test_parameterized_query, 44 | run_record_class_query as test_record_class_query, 45 | run_select_cursor_context_manager as test_select_cursor_context_manager, 46 | run_select_one as test_select_one, 47 | # run_insert_returning as test_insert_returning, 48 | run_delete as test_delete, 49 | run_insert_many as test_insert_many, 50 | run_select_value as test_select_value, 51 | run_date_time as test_date_time, 52 | run_object_attributes as test_object_attributes, 53 | run_execute_script as test_execute_script, 54 | # FIXME kwargs -> args? 55 | # run_modulo as test_modulo, 56 | ) 57 | -------------------------------------------------------------------------------- /tests/blogdb/sql/blogs/ms/blogs.sql: -------------------------------------------------------------------------------- 1 | -- name: create-table-users# 2 | CREATE TABLE users( 3 | userid INTEGER IDENTITY(1, 1) PRIMARY KEY, 4 | username VARCHAR(MAX) NOT NULL, 5 | firstname VARCHAR(MAX) NOT NULL, 6 | lastname VARCHAR(MAX) NOT NULL 7 | ); 8 | 9 | -- name: create-table-blogs# 10 | CREATE TABLE blogs( 11 | blogid INTEGER IDENTITY(1, 1) PRIMARY KEY, 12 | userid INTEGER NOT NULL REFERENCES users, 13 | title VARCHAR(MAX) NOT NULL, 14 | content VARCHAR(MAX) NOT NULL, 15 | published DATE NOT NULL DEFAULT (GETDATE()) 16 | ); 17 | 18 | -- name: drop-table-comments# 19 | IF OBJECT_ID('comments', 'U') IS NOT NULL 20 | DROP TABLE comments; 21 | 22 | -- name: drop-table-blogs# 23 | IF OBJECT_ID('blogs', 'U') IS NOT NULL 24 | DROP TABLE blogs; 25 | 26 | -- name: drop-table-users# 27 | IF OBJECT_ID('users', 'U') IS NOT NULL 28 | DROP TABLE users; 29 | 30 | -- name: get-blogs-published-after 31 | -- Get all blogs by all authors published after the given date. 32 | select title, 33 | username, 34 | concat(convert(VARCHAR, published, 23), ' 00:00') as "published" 35 | from blogs as b 36 | join users as u ON (u.userid = b.userid) 37 | where published >= :published 38 | order by published desc; 39 | 40 | 41 | -- name: publish-blog= :published 26 | order by b.published desc; 27 | 28 | 29 | -- name: bulk-publish*! 
30 | -- Insert many blogs at once 31 | insert into blogs ( 32 | userid, 33 | title, 34 | content, 35 | published 36 | ) 37 | values (?, ?, ?, ?); 38 | 39 | -- name: publish-a-blog 80 | make VENV=/venv check.$TEST.mariadb.detached check.$TEST.mssql.detached check.$TEST.misc 81 | -------------------------------------------------------------------------------- /docs/source/advanced-topics.rst: -------------------------------------------------------------------------------- 1 | Advanced Topics 2 | =============== 3 | 4 | Accessing the ``cursor`` object 5 | ------------------------------- 6 | 7 | The cursor is a temporary object created in memory that allows you to perform 8 | row-by-row operations on your data and use handy methods such as 9 | ``.description``, ``.fetchall()`` and ``.fetchone()``. 10 | As long as you are running a SQL ``SELECT`` query, you can access the cursor 11 | object by appending ``_cursor`` to the end of the queries name. 12 | 13 | For example, say you have the following query named ``get-all-greetings`` in a ``sql`` file: 14 | 15 | .. literalinclude:: ../../example/greetings.sql 16 | :language: sql 17 | :lines: 1-5 18 | 19 | With this query, you can get all ``greeting_id``'s and ``greeting``'s, access 20 | the cursor object, and print the column names with the following code: 21 | 22 | .. literalinclude:: ../../example/greetings_cursor.py 23 | :language: python 24 | 25 | Accessing prepared SQL as a string 26 | ---------------------------------- 27 | 28 | When you need to do something not directly supported by aiosql, this is your 29 | escape hatch. 30 | You can still define your SQL in a file and load it with aiosql, but then you 31 | may choose to use it without calling your aiosql method. 32 | The prepared SQL string of a method is available as an attribute of each method 33 | ``queries..sql``. 34 | Here's an example of how you might use it with a unique feature of ``psycopg2`` like 35 | `execute_values `__. 36 | 37 | .. literalinclude:: ../../example/pg_execute_values.py 38 | :language: python 39 | 40 | Accessing the SQL Operation Type 41 | -------------------------------- 42 | 43 | Query functions also provide access to the SQL operation type you define in 44 | your library. 45 | This can be useful for observability (such as metrics, tracing, or logging), or 46 | customizing how you manage different operations within your codebase. Extending 47 | from the above example: 48 | 49 | .. literalinclude:: ../../example/observe_query.py 50 | :language: python 51 | -------------------------------------------------------------------------------- /tests/test_mariadb.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import aiosql 3 | import pytest 4 | import run_tests as t 5 | 6 | try: 7 | import mariadb as db 8 | except ModuleNotFoundError: 9 | pytest.skip("missing driver: mariadb", allow_module_level=True) 10 | 11 | pytestmark = [ 12 | pytest.mark.mariadb, 13 | # FIXME this should run in detached mode!? 
14 | # pytest.mark.skipif(not u.has_pkg("pytest_mysql"), reason="no pytest_mysql"), 15 | ] 16 | 17 | @pytest.fixture(scope="module") 18 | def driver(): 19 | return "mariadb" 20 | 21 | @pytest.fixture(scope="module") 22 | def date(): 23 | return datetime.date 24 | 25 | @pytest.fixture 26 | def conn(my_db): 27 | return my_db 28 | 29 | @pytest.fixture 30 | def dconn(my_db): 31 | return my_db 32 | 33 | def test_my_dsn(my_dsn): 34 | assert "user" in my_dsn and "host" in my_dsn and "port" in my_dsn 35 | assert "dbname" not in my_dsn and "database" in my_dsn 36 | 37 | from run_tests import ( 38 | run_sanity as test_sanity, 39 | run_something as test_something, 40 | run_cursor as test_cursor, 41 | # run_record_query as test_record_query, 42 | # run_parameterized_record_query as test_parameterized_record_query, 43 | run_record_class_query as test_record_class_query, 44 | run_parameterized_query as test_parameterized_query, 45 | run_select_cursor_context_manager as test_select_cursor_context_manager, 46 | run_select_one as test_select_one, 47 | # FIXME should work 48 | # run_insert_returning as test_insert_returning, 49 | run_delete as test_delete, 50 | run_insert_many as test_insert_many, 51 | run_select_value as test_select_value, 52 | run_date_time as test_date_time, 53 | run_object_attributes as test_object_attributes, 54 | run_execute_script as test_execute_script, 55 | run_modulo as test_modulo, 56 | ) 57 | 58 | def test_record_query(dconn, queries): 59 | queries._queries.driver_adapter._kwargs = {"dictionary": True} 60 | return t.run_record_query(dconn, queries) 61 | 62 | def test_parameterized_record_query(dconn, queries, date): 63 | queries._queries.driver_adapter._kwargs = {"dictionary": True} 64 | return t.run_parameterized_record_query(dconn, queries, date) 65 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | import importlib 3 | import logging 4 | import time 5 | import contextlib 6 | import asyncio 7 | 8 | log = logging.getLogger("pytest-aiosql") 9 | logging.basicConfig(level=logging.INFO) 10 | 11 | def has_cmd(cmd): 12 | return shutil.which(cmd) is not None 13 | 14 | def has_pkg(pkg): 15 | """Tell whether a module is available.""" 16 | try: 17 | importlib.import_module(pkg) 18 | return True 19 | except ModuleNotFoundError: 20 | return False 21 | 22 | def has_service(host="localhost", port=22, retry=1): 23 | """Tell whether a service (host port) is available.""" 24 | import socket 25 | 26 | while retry > 0: 27 | retry -= 1 28 | try: 29 | tcp_ip = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 30 | tcp_ip.settimeout(1) 31 | res = tcp_ip.connect_ex((host, port)) 32 | if res == 0: 33 | return True 34 | if retry > 0: 35 | time.sleep(3) 36 | except Exception as e: 37 | log.info(f"connection to {(host, port)} failed: {e}") 38 | if retry > 0: 39 | time.sleep(3) 40 | finally: 41 | tcp_ip.close() 42 | return False 43 | 44 | @contextlib.contextmanager 45 | def db_connect(db, tries, *args, **kwargs): 46 | """Return an auto-closing database connection, possibly with several attempts.""" 47 | fails, done = 0, False 48 | while not done and fails < tries: 49 | try: 50 | with db.connect(*args, **kwargs) as conn: 51 | done = True 52 | yield conn 53 | except Exception as e: 54 | fails += 1 55 | log.warning(f"{db.__name__} connection failed ({fails}): {e}") 56 | time.sleep(1.0) 57 | if not done: 58 | log.error(f"failed to connect after {tries} 
attempts") 59 | 60 | def dict_factory(cursor, row): 61 | d = {} 62 | for idx, col in enumerate(cursor.description): 63 | d[col[0]] = row[idx] 64 | return d 65 | 66 | def run_async(awaitable): 67 | loop = asyncio.get_event_loop() 68 | return loop.run_until_complete(awaitable) 69 | -------------------------------------------------------------------------------- /.github/workflows/aiosql-package.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Aiosql Package 5 | 6 | on: 7 | push: 8 | branches: [ main ] 9 | pull_request: 10 | branches: [ main ] 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-latest 15 | strategy: 16 | fail-fast: false 17 | matrix: 18 | # https://github.com/actions/python-versions (versions-manifest.json) 19 | # https://downloads.python.org/pypy/versions.json 20 | # 3.14 KO on pydantic build dependences 2024-10-27 21 | # 3.14 way too slow duckdb wheel build 2025-01-28 22 | python: 23 | - version: "pypy3.10" 24 | - version: "pypy3.11" 25 | - version: "3.10" 26 | - version: "3.11" 27 | - version: "3.12" 28 | - version: "3.13" 29 | - version: "3.13t" 30 | gil: 1 31 | - version: "3.13t" 32 | gil: 0 33 | - version: "3.14" 34 | - version: "3.14t" 35 | gil: 1 36 | - version: "3.14t" 37 | gil: 0 38 | env: 39 | PYTHON_GIL: ${{ matrix.python.gil }} 40 | steps: 41 | - name: Checkout Project 42 | uses: actions/checkout@v6 43 | - name: Set up Python ${{ matrix.python.version }} 44 | uses: actions/setup-python@v6 45 | with: 46 | python-version: ${{ matrix.python.version }} 47 | allow-prereleases: true 48 | cache: "pip" 49 | - name: Install dependencies 50 | run: | 51 | python -m pip install --upgrade pip 52 | python -m pip install .[dev,dev-postgres,dev-mysql,dev-sqlite,dev-duckdb] 53 | - name: Check types with mypy 54 | run: make VENV= INSTALL= check.mypy 55 | - name: Check types with pyright 56 | run: make VENV= INSTALL= check.pyright 57 | - name: Lint with ruff 58 | run: make VENV= INSTALL= check.ruff 59 | - name: Test with pytest and databases 60 | run: make VENV= INSTALL= check.pytest 61 | - name: Coverage tests 62 | run: make VENV= INSTALL= check.coverage 63 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Obvious 2 | .git 3 | 4 | # Swap 5 | [._]*.s[a-v][a-z] 6 | [._]*.sw[a-p] 7 | [._]s[a-rt-v][a-z] 8 | [._]ss[a-gi-z] 9 | [._]sw[a-p] 10 | 11 | # Session 12 | Session.vim 13 | 14 | # Temporary 15 | .netrwhist 16 | *~ 17 | # Auto-generated tag files 18 | tags 19 | # Persistent undo 20 | [._]*.un~ 21 | 22 | # Byte-compiled / optimized / DLL files 23 | __pycache__/ 24 | *.py[cod] 25 | *$py.class 26 | 27 | # C extensions 28 | *.so 29 | 30 | # Distribution / packaging 31 | .Python 32 | build/ 33 | develop-eggs/ 34 | dist/ 35 | downloads/ 36 | eggs/ 37 | .eggs/ 38 | lib/ 39 | lib64/ 40 | parts/ 41 | sdist/ 42 | var/ 43 | wheels/ 44 | *.egg-info/ 45 | .installed.cfg 46 | *.egg 47 | MANIFEST 48 | docs/html 49 | 50 | # PyInstaller 51 | # Usually these files are written by a python script from a template 52 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
53 | *.manifest 54 | *.spec 55 | 56 | # Installer logs 57 | pip-log.txt 58 | pip-delete-this-directory.txt 59 | 60 | # Unit test / coverage reports 61 | htmlcov/ 62 | .tox/ 63 | .coverage 64 | .coverage.* 65 | .cache 66 | nosetests.xml 67 | coverage.xml 68 | *.cover 69 | .hypothesis/ 70 | .pytest_cache/ 71 | 72 | # Translations 73 | *.mo 74 | *.pot 75 | 76 | # Django stuff: 77 | *.log 78 | local_settings.py 79 | db.sqlite3 80 | 81 | # Flask stuff: 82 | instance/ 83 | .webassets-cache 84 | 85 | # Scrapy stuff: 86 | .scrapy 87 | 88 | # Sphinx documentation 89 | docs/build/ 90 | docs/source/pydoc/ 91 | 92 | # PyBuilder 93 | target/ 94 | 95 | # Jupyter Notebook 96 | .ipynb_checkpoints 97 | 98 | # pyenv 99 | .python-version 100 | 101 | # celery beat schedule file 102 | celerybeat-schedule 103 | 104 | # SageMath parsed files 105 | *.sage.py 106 | 107 | # Environments 108 | .env 109 | .venv 110 | env/ 111 | venv/ 112 | ENV/ 113 | env.bak/ 114 | venv.bak/ 115 | poetry.lock 116 | 117 | # Spyder project settings 118 | .spyderproject 119 | .spyproject 120 | 121 | # Rope project settings 122 | .ropeproject 123 | 124 | # mypy 125 | .mypy_cache/ 126 | 127 | # PyCharm 128 | .idea/ 129 | 130 | # Project files 131 | scratch/ 132 | exampleblog.db 133 | 134 | # misc 135 | .docker.* 136 | -------------------------------------------------------------------------------- /tests/test_apsycopg3.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import aiosql 3 | import pytest 4 | import pytest_asyncio 5 | import run_tests as t 6 | import utils as u 7 | import copy 8 | 9 | try: 10 | import psycopg as db 11 | from psycopg.rows import dict_row 12 | except ModuleNotFoundError: 13 | pytest.skip("missing driver: psycopg", allow_module_level=True) 14 | 15 | pytestmark = [ 16 | pytest.mark.postgres, 17 | pytest.mark.skipif(not u.has_pkg("pytest_postgresql"), reason="no pytest_postgresql"), 18 | pytest.mark.skipif(not u.has_pkg("pytest_asyncio"), reason="no pytest_asyncio"), 19 | ] 20 | 21 | @pytest.fixture(scope="module") 22 | def driver(): 23 | return "apsycopg" 24 | 25 | @pytest.fixture(scope="module") 26 | def date(): 27 | return datetime.date 28 | 29 | # FIXME merge? 
30 | 31 | # raw connection without database data 32 | @pytest_asyncio.fixture 33 | async def rconn(pg_params): 34 | conn = await db.AsyncConnection.connect(**pg_params) 35 | yield conn 36 | await conn.close() 37 | 38 | # asynchronous tuple connection with database data 39 | @pytest_asyncio.fixture 40 | async def aconn(rconn, pg_db): 41 | yield rconn 42 | 43 | # asynchronous dict connection with database data 44 | @pytest_asyncio.fixture 45 | async def dconn(pg_params, pg_db): 46 | params = copy.copy(pg_params) 47 | params["row_factory"] = dict_row 48 | conn = await db.AsyncConnection.connect(**params) 49 | yield conn 50 | await conn.close() 51 | 52 | from run_tests import ( 53 | run_async_sanity as test_sanity, 54 | run_async_record_query as test_record_query, 55 | run_async_parameterized_record_query as test_parameterized_record_query, 56 | run_async_parameterized_query as test_parameterized_query, 57 | run_async_select_one as test_select_one, 58 | run_async_select_value as test_select_value, 59 | run_async_delete as test_delete, 60 | run_async_execute_script as test_execute_script, 61 | run_async_record_class_query as test_record_class_query, 62 | run_async_methods as test_methods, 63 | run_async_select_cursor_context_manager as test_select_cursor_context_manager, 64 | run_async_insert_returning as test_insert_returning, 65 | run_async_insert_many as test_insert_many, 66 | ) 67 | 68 | def test_version(): 69 | assert db.__version__.startswith("3.") 70 | -------------------------------------------------------------------------------- /tests/test_apsw.py: -------------------------------------------------------------------------------- 1 | import aiosql 2 | import pytest 3 | import run_tests as t 4 | import utils 5 | 6 | try: 7 | import apsw as db 8 | except ModuleNotFoundError: 9 | pytest.skip("missing driver: apsw", allow_module_level=True) 10 | 11 | pytestmark = [ 12 | pytest.mark.sqlite3, 13 | ] 14 | 15 | @pytest.fixture(scope="module") 16 | def driver(): 17 | return "apsw" 18 | 19 | @pytest.fixture(scope="module") 20 | def date(): 21 | return t.todate 22 | 23 | # driver does not seem to return row counts on ! 
24 | @pytest.fixture(scope="module") 25 | def expect(): 26 | return -1 27 | 28 | class APSWConnection(db.Connection): 29 | """APSW Connection wrapper with autocommit off.""" 30 | 31 | def __init__(self, *args, **kwargs): 32 | super().__init__(*args, **kwargs) 33 | self._begin() 34 | 35 | def _begin(self): 36 | self.cursor().execute("BEGIN").close() 37 | 38 | def commit(self): # pragma: no cover 39 | self.cursor().execute("COMMIT").close() 40 | self._begin() 41 | 42 | def _rollback(self): 43 | self.cursor().execute("ROLLBACK").close() 44 | 45 | def rollback(self): # pragma: no cover 46 | self._rollback() 47 | self._begin() 48 | 49 | def close(self): 50 | self._rollback() 51 | super().close() 52 | 53 | @pytest.fixture 54 | def rconn(li_dbpath): 55 | conn = APSWConnection(li_dbpath) 56 | yield conn 57 | conn.close() 58 | 59 | @pytest.fixture 60 | def conn(li_db): 61 | yield li_db 62 | 63 | @pytest.fixture 64 | def dconn(conn): 65 | conn.setrowtrace(utils.dict_factory) 66 | return conn 67 | 68 | from run_tests import ( 69 | run_sanity as test_sanity, 70 | run_something as test_something, 71 | run_cursor as test_cursor, 72 | run_record_query as test_record_query, 73 | run_parameterized_query as test_parameterized_query, 74 | run_parameterized_record_query as test_parameterized_record_query, 75 | run_record_class_query as test_record_class_query, 76 | run_select_cursor_context_manager as test_select_cursor_context_manager, 77 | run_select_one as test_select_one, 78 | # FIXME not supported? 79 | # run_insert_returning as test_insert_returning, 80 | run_delete as test_delete, 81 | run_insert_many as test_insert_many, 82 | run_select_value as test_select_value, 83 | run_date_time as test_date_time, 84 | run_object_attributes as test_object_attributes, 85 | run_execute_script as test_execute_script, 86 | run_modulo as test_modulo, 87 | ) 88 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools", "setuptools-scm"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "aiosql" 7 | version = "14.1" 8 | authors = [ { name = "William Vaughn et al.", email = "vaughnwilld@gmail.com" } ] 9 | description = "Simple SQL in Python" 10 | readme = "README.rst" 11 | requires-python = ">=3.10" 12 | license = "BSD-2-Clause" 13 | classifiers = [ 14 | "Programming Language :: Python :: 3", 15 | "Programming Language :: SQL", 16 | "Operating System :: OS Independent", 17 | "Development Status :: 5 - Production/Stable", 18 | "Intended Audience :: Developers", 19 | "Topic :: Database", 20 | "Topic :: Software Development :: Libraries :: Python Modules" 21 | ] 22 | 23 | [project.optional-dependencies] 24 | dev = [ 25 | "pytest", "pytest-asyncio", "coverage", 26 | "black", "flake8", "ruff", 27 | "mypy", "pyright", "types-setuptools", 28 | ] 29 | dev-duckdb = [ 30 | # skip, wheel compilation takes too much time… (confirmed 2024-09-30) 31 | "duckdb; implementation_name != 'pypy'" 32 | ] 33 | dev-sqlite = [ 34 | "aiosqlite", 35 | # fails on missing symbol with pypy (confirmed 2024-10-01) 36 | "apsw; implementation_name != 'pypy'" 37 | ] 38 | dev-postgres = [ 39 | "pytest-postgresql", 40 | "asyncpg; python_version < '3.13' and implementation_name != 'pypy'", 41 | "psycopg>=3", 42 | # 2.9.10 needed for 3.13 43 | "psycopg2 >= 2.9.10; implementation_name != 'pypy'", 44 | "pygresql", 45 | "pg8000" 46 | ] 47 | dev-mysql = [ 48 | 
"pytest-mysql", 49 | "mysqlclient", 50 | "mysql-connector-python", 51 | "pymysql" 52 | ] 53 | dev-mariadb = [ 54 | "mariadb" 55 | ] 56 | dev-mssql = [ 57 | "pymssql" 58 | ] 59 | doc = [ 60 | "sphinx", 61 | "sphinx-rtd-theme", 62 | # FIXME 3.13t rstcheck depends on pydantic 63 | "rstcheck", 64 | ] 65 | dist = [ 66 | "build", "wheel", "twine", 67 | ] 68 | 69 | [tool.setuptools.package-data] 70 | aiosql = [ "py.typed" ] 71 | 72 | [tool.setuptools.packages.find] 73 | include = [ "aiosql", "aiosql.adapters" ] 74 | exclude = [ "tests" ] 75 | 76 | [project.urls] 77 | repository = "https://github.com/nackjicholson/aiosql" 78 | documentation = "https://nackjicholson.github.io/aiosql/" 79 | issues = "https://github.com/nackjicholson/aiosql/issues" 80 | package = "https://pypi.org/project/aiosql/" 81 | 82 | [tool.black] 83 | line-length = 100 84 | target-version = ["py310"] 85 | 86 | [tool.mypy] 87 | exclude = ["(build|venv)/", ".*/virtualenvs/.*"] 88 | ignore_missing_imports = true 89 | -------------------------------------------------------------------------------- /docker/README.md: -------------------------------------------------------------------------------- 1 | # AioSQL Docker Tests 2 | 3 | As MySQL et MariaDB cannot be installed one alongside the other easily, 4 | this directory provides a docker solution with 3 servers (for postgres, 5 | mysql and mariadb) and their clients. Tests with databases sqlite3 and duckdb 6 | are run with mariadb because it has the lowest load. 7 | 8 | ## Servers 9 | 10 | They rely on the official images for `postgres`, `mysql` and `mariadb`. 11 | 12 | ## Clients 13 | 14 | They are built on top of `ubuntu` because using the official `python` 15 | image could not be made to work for all 5 databases. 16 | See docker specifications in `dockerfile.python-*`. 17 | 18 | ## Makefile 19 | 20 | Run docker compose for `pytest` or `coverage`. 21 | 22 | ```shell 23 | # get/update docker images 24 | docker image pull postgres 25 | docker image pull mariadb 26 | docker image pull mysql 27 | docker image pull ubuntu 28 | # generate client images 29 | make docker.aiosql 30 | # run tests in .. 31 | make docker.pytest 32 | make docker.coverage 33 | ``` 34 | 35 | ## Miscellaneous Commands 36 | 37 | Run a client with access to host: 38 | 39 | ```sh 40 | docker run -it -v .:/code --add-host=host.docker.internal:host-gateway some-image bash 41 | ``` 42 | 43 | Build an image: 44 | 45 | ```sh 46 | docker build -t aiosql-python-mysql -f dockerfile.python-mysql . 47 | ``` 48 | 49 | Run docker clients against manually started docker servers: 50 | 51 | ```sh 52 | docker run -it -v .:/code --add-host=host.docker.internal:host-gateway \ 53 | python-aiosql-dbs \ 54 | make VENV=/venv MA_HOST=host.docker.internal check.pytest.mariadb.detached 55 | docker run -it -v .:/code --add-host=host.docker.internal:host-gateway \ 56 | python-aiosql-mysql \ 57 | make VENV=/venv MY_HOST=host.docker.internal check.pytest.mysql.detached 58 | docker run -it -v .:/code --add-host=host.docker.internal:host-gateway \ 59 | python-aiosql-dbs \ 60 | make VENV=/venv MS_HOST=host.docker.internal check.pytest.mssql.detached 61 | ``` 62 | 63 | ## MS SQL Server 64 | 65 | See [ubuntu image](https://hub.docker.com/r/microsoft/mssql-server) and its associated 66 | [documentation](https://learn.microsoft.com/en-us/sql/linux/sql-server-linux-configure-environment-variables) 67 | 68 | ```sh 69 | docker pull mcr.microsoft.com/mssql/server:2022-latest 70 | docker run -e "ACCEPT_EULA=Y" -e "MSSQL_SA_PASSWORD=Abc123.." 
-e "MSSQL_PID=Developer" \ 71 | -p 1433:1433 --name mssqltest --hostname mssqltest -d mcr.microsoft.com/mssql/server:2022-latest 72 | docker exec -it mssqltest /opt/mssql-tools18/bin/sqlcmd -C -S localhost -U sa -P "Abc123.." 73 | # type a command 74 | # go 75 | ``` 76 | -------------------------------------------------------------------------------- /aiosql/adapters/aiosqlite.py: -------------------------------------------------------------------------------- 1 | from contextlib import asynccontextmanager 2 | 3 | 4 | class AioSQLiteAdapter: 5 | is_aio_driver = True 6 | 7 | def process_sql(self, _query_name, _op_type, sql): 8 | """Pass through function because the ``aiosqlite`` driver can already handle the 9 | ``:var_name`` format used by aiosql and doesn't need any additional processing. 10 | 11 | Args: 12 | 13 | - _query_name (str): The name of the sql query. 14 | - _op_type (SQLOperationType): The type of SQL operation performed by the query. 15 | - sql (str): The sql as written before processing. 16 | 17 | Returns: 18 | 19 | - str: Original SQL text unchanged. 20 | """ 21 | return sql 22 | 23 | async def select(self, conn, _query_name, sql, parameters, record_class=None): 24 | async with conn.execute(sql, parameters) as cur: 25 | if record_class is not None: 26 | column_names = [c[0] for c in cur.description] 27 | for row in await cur.fetchall(): 28 | yield record_class(**dict(zip(column_names, row))) 29 | else: 30 | for row in await cur.fetchall(): 31 | yield row 32 | 33 | async def select_one(self, conn, _query_name, sql, parameters, record_class=None): 34 | async with conn.execute(sql, parameters) as cur: 35 | result = await cur.fetchone() 36 | if result is not None and record_class is not None: 37 | column_names = [c[0] for c in cur.description] 38 | result = record_class(**dict(zip(column_names, result))) 39 | return result 40 | 41 | async def select_value(self, conn, _query_name, sql, parameters): 42 | async with conn.execute(sql, parameters) as cur: 43 | result = await cur.fetchone() 44 | return result[0] if result else None 45 | 46 | @asynccontextmanager 47 | async def select_cursor(self, conn, _query_name, sql, parameters): 48 | async with conn.execute(sql, parameters) as cur: 49 | yield cur 50 | 51 | async def insert_returning(self, conn, _query_name, sql, parameters): 52 | async with conn.execute(sql, parameters) as cur: 53 | return cur.lastrowid 54 | 55 | async def insert_update_delete(self, conn, _query_name, sql, parameters): 56 | async with conn.execute(sql, parameters) as cur: 57 | return cur.rowcount 58 | 59 | async def insert_update_delete_many(self, conn, _query_name, sql, parameters): 60 | cur = await conn.executemany(sql, parameters) 61 | await cur.close() 62 | 63 | async def execute_script(self, conn, sql): 64 | await conn.executescript(sql) 65 | return "DONE" 66 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. 
If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | # 13 | # import os 14 | # import sys 15 | # sys.path.insert(0, os.path.abspath('.')) 16 | 17 | 18 | # -- Project information ----------------------------------------------------- 19 | 20 | project = "aiosql" 21 | copyright = "2018-2025, William Vaughn et alii." 22 | author = "William Vaughn " 23 | github_doc_root = "https://github.com/nackjicholson/aiosql/tree/master/docs/source/" 24 | 25 | # The full version, including alpha/beta/rc tags 26 | from importlib.metadata import version as pkg_version 27 | release = pkg_version("aiosql") 28 | 29 | # -- General configuration --------------------------------------------------- 30 | 31 | # Add any Sphinx extension module names here, as strings. They can be 32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 33 | # ones. 34 | extensions = ["sphinx_rtd_theme", "sphinx.ext.autodoc", "sphinx.ext.githubpages"] 35 | 36 | # Add any paths that contain templates here, relative to this directory. 37 | # templates_path = ["_templates"] 38 | 39 | # The language for content autogenerated by Sphinx. Refer to documentation 40 | # for a list of supported languages. 41 | # 42 | # This is also used if you do content translation via gettext catalogs. 43 | # Usually you set "language" from the command line for these cases. 44 | language = "en" 45 | 46 | # List of patterns, relative to source directory, that match files and 47 | # directories to ignore when looking for source files. 48 | # This pattern also affects html_static_path and html_extra_path. 49 | exclude_patterns = [] # type: ignore 50 | 51 | 52 | # -- Options for HTML output ------------------------------------------------- 53 | 54 | # The theme to use for HTML and HTML Help pages. See the documentation for 55 | # a list of builtin themes. 56 | # 57 | html_theme = "sphinx_rtd_theme" 58 | 59 | # Add any paths that contain custom static files (such as style sheets) here, 60 | # relative to this directory. They are copied after the builtin static files, 61 | # so a file named "default.css" will overwrite the builtin "default.css". 
62 | # html_static_path = ["_static"] 63 | 64 | 65 | # -- Extension configuration ------------------------------------------------- 66 | -------------------------------------------------------------------------------- /aiosql/adapters/ageneric.py: -------------------------------------------------------------------------------- 1 | from contextlib import asynccontextmanager 2 | from ..types import AsyncDriverAdapterProtocol 3 | 4 | # it is unclear how generic is this class 5 | class AsyncGenericAdapter(AsyncDriverAdapterProtocol): 6 | 7 | is_aio_driver = True 8 | 9 | def process_sql(self, query_name, op_type, sql): 10 | return sql # pragma: no cover 11 | 12 | # this is an asynchronous generator 13 | async def select(self, conn, query_name, sql, parameters, record_class=None): 14 | cur = await conn.execute(sql, parameters) 15 | try: 16 | if record_class is not None: 17 | column_names = [c[0] for c in cur.description] 18 | for row in await cur.fetchall(): 19 | yield record_class(**dict(zip(column_names, row))) 20 | else: 21 | # psycopg 3.3: async for row in cur.results(): 22 | for row in await cur.fetchall(): 23 | yield row 24 | finally: 25 | await cur.close() 26 | 27 | async def select_one(self, conn, query_name, sql, parameters, record_class=None): 28 | cur = await conn.execute(sql, parameters) 29 | result = await cur.fetchone() 30 | if result is not None and record_class is not None: 31 | column_names = [c[0] for c in cur.description] 32 | result = record_class(**dict(zip(column_names, result))) 33 | await cur.close() 34 | return result 35 | 36 | async def select_value(self, conn, query_name, sql, parameters): 37 | cur = await conn.execute(sql, parameters) 38 | result = await cur.fetchone() 39 | res = result[0] if result else None 40 | await cur.close() 41 | return res 42 | 43 | @asynccontextmanager 44 | async def select_cursor(self, conn, query_name, sql, parameters): 45 | cur = await conn.execute(sql, parameters) 46 | yield cur 47 | await cur.close() 48 | 49 | async def insert_returning(self, conn, query_name, sql, parameters): 50 | cur = await conn.execute(sql, parameters) 51 | result = await cur.fetchone() 52 | # res = result[0] if result else None 53 | await cur.close() 54 | return result 55 | 56 | async def insert_update_delete(self, conn, query_name, sql, parameters): 57 | cur = await conn.execute(sql, parameters) 58 | rc = cur.rowcount if hasattr(cur, "rowcount") else None 59 | await cur.close() 60 | return rc 61 | 62 | async def insert_update_delete_many(self, conn, query_name, sql, parameters): 63 | cur = conn.cursor() 64 | res = await cur.executemany(sql, parameters) 65 | await cur.close() 66 | return res 67 | 68 | async def execute_script(self, conn, sql): 69 | cur = await conn.execute(sql) 70 | msg = cur.statusmessage if hasattr(cur, "statusmessage") else "DONE" 71 | await cur.close() 72 | return msg 73 | -------------------------------------------------------------------------------- /tests/conf_pgsql.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from conf_schema import create_user_blogs, fill_user_blogs, drop_user_blogs 3 | import run_tests as t 4 | 5 | # guess psycopg version from a connection 6 | def is_psycopg2(conn): 7 | return hasattr(conn, "get_dsn_parameters") 8 | 9 | try: 10 | from pytest_postgresql import factories as pg_factories 11 | 12 | @pytest.fixture 13 | def pg_conn(request): 14 | """Loads seed data and return a database connection.""" 15 | is_detached = 
request.config.getoption("postgresql_detached") 16 | if is_detached: # pragma: no cover 17 | # this is *NOT* a connection, it does not have a "cursor" 18 | pg = request.getfixturevalue("postgresql_noproc") 19 | import psycopg 20 | 21 | conn = psycopg.connect( 22 | host=pg.host, 23 | port=pg.port, 24 | user=pg.user, 25 | password=pg.password, 26 | dbname=pg.dbname, 27 | options=pg.options, 28 | ) 29 | else: 30 | # returns the underlying pytest-postgresql connection 31 | # which may be psycopg version 2 or 3, depending. 32 | conn = request.getfixturevalue("postgresql") 33 | 34 | # yield the psycopg? connection 35 | yield conn 36 | 37 | # done 38 | conn.close() 39 | 40 | @pytest.fixture 41 | def pg_params(request, pg_conn): 42 | """Build postgres connection parameters as a dictionary.""" 43 | if is_psycopg2(pg_conn): # pragma: no cover 44 | dsn = pg_conn.get_dsn_parameters() 45 | del dsn["tty"] 46 | else: # assume psycopg 3.x 47 | dsn = pg_conn.info.get_parameters() 48 | # non empty password? 49 | if "password" not in dsn: 50 | dsn["password"] = request.config.getoption("postgresql_password") or "" 51 | if "port" not in dsn: 52 | dsn["port"] = 5432 53 | return dsn 54 | 55 | @pytest.fixture 56 | def pg_dsn(request, pg_params): 57 | """Build a postgres URL connection string.""" 58 | p = pg_params 59 | yield f"postgres://{p['user']}:{p['password']}@{p['host']}:{p['port']}/{p['dbname']}" 60 | 61 | @pytest.fixture 62 | def pg_db(rconn, queries): 63 | create_user_blogs(rconn, queries) 64 | fill_user_blogs(rconn, queries) 65 | yield rconn 66 | drop_user_blogs(rconn, queries) 67 | 68 | except ModuleNotFoundError: 69 | # FIXME empty fixtures to please pytest 70 | 71 | @pytest.fixture 72 | def pg_conn(): 73 | raise Exception("unimplemented fixture") 74 | 75 | @pytest.fixture 76 | def pg_params(): 77 | raise Exception("unimplemented fixture") 78 | 79 | @pytest.fixture 80 | def pg_dsn(): 81 | raise Exception("unimplemented fixture") 82 | 83 | @pytest.fixture 84 | def pg_db(): 85 | raise Exception("unimplemented fixture") 86 | -------------------------------------------------------------------------------- /docs/source/contributing.rst: -------------------------------------------------------------------------------- 1 | Contributing 2 | ============ 3 | 4 | First, thank you for considering to make a contribution to this project. 5 | Spending your valuable time helping make this project better is deeply appreciated. 6 | All kinds of contributions are helpful and welcome. 7 | 8 | - Report issues ``__ 9 | - Review or make your own pull requests ``__ 10 | - Write documentation ``__ 11 | 12 | Whether you have an idea for a feature improvement or have found a troubling bug, thank you for being here. 13 | 14 | 15 | Packaging & Distribution 16 | ------------------------ 17 | 18 | This aiosql repository uses the Python standard packaging tools. 19 | Read about them in more detail at the following links. 20 | 21 | - `Python Packaging User Guide `__ 22 | - `PyPA - Packaging & Distributing projects `__ 23 | - `setuptools `__ 24 | - `build `__ 25 | - `twine `__ 26 | 27 | Development Setup 28 | ----------------- 29 | 30 | 1. Create a virtual environment 31 | 32 | .. code:: sh 33 | 34 | # get the project sources 35 | git clone git@github.com:nackjicholson/aiosql.git 36 | cd aiosql 37 | # create a venv manually 38 | python -m venv venv 39 | source venv/bin/activate 40 | pip install --upgrade pip 41 | 42 | All subsequent steps will assume you are using python within your activated virtual environment. 43 | 44 | 1. 
Install the development dependencies 45 | 46 | As a development library, aiosql is expected to work with all supported 47 | versions of Python, and many drivers. 48 | The bare minimum of version pinning is declared in the dependencies. 49 | 50 | .. code:: sh 51 | 52 | # development tools 53 | pip install .[dev] 54 | # per-database stuff 55 | pip install .[dev-sqlite] 56 | pip install .[dev-postgres] 57 | pip install .[dev-duckdb] 58 | pip install .[dev-mysql] 59 | pip install .[dev-mariadb] 60 | 61 | 1. Run tests 62 | 63 | .. code:: sh 64 | 65 | pytest 66 | 67 | Alternatively, there is a convenient ``Makefile`` to automate the above tasks: 68 | 69 | .. code:: sh 70 | 71 | make venv.dev # install dev virtual environment 72 | source venv/bin/activate 73 | make check # run all checks: pytest, flake8, coverage… 74 | 75 | Also, there is a working ``poetry`` setup in ``pyproject.toml``. 76 | 77 | Dependency Management 78 | --------------------- 79 | 80 | There is no dependency for using ``aiosql`` other than installing your 81 | driver of choice. 82 | 83 | For development you need to test with various databases and even more drivers, 84 | see above for generating a working python environment. 85 | 86 | See also the ``docker`` sub-directory which contains dockerfiles for testing 87 | with Postgres, MySQL, MariaDB and MS SQL Server. 88 | -------------------------------------------------------------------------------- /example/example.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import sqlite3 3 | from datetime import datetime 4 | from pathlib import Path 5 | from typing import NamedTuple 6 | 7 | import aiosql 8 | 9 | 10 | dir_path = Path(__file__).parent 11 | sql_path = dir_path / "sql" 12 | db_path = dir_path / "exampleblog.db" 13 | queries = aiosql.from_path(dir_path / "sql", "sqlite3") 14 | 15 | 16 | users = [("bobsmith", "Bob", "Smith"), ("johndoe", "John", "Doe"), ("janedoe", "Jane", "Doe")] 17 | blogs = [ 18 | ( 19 | 1, 20 | "What I did Today", 21 | """\ 22 | I mowed the lawn, washed some clothes, and ate a burger. 23 | 24 | Until next time, 25 | Bob""", 26 | "2017-07-28", 27 | ), 28 | ( 29 | 3, 30 | "Testing", 31 | """\ 32 | Is this thing on? 33 | """, 34 | "2018-01-01", 35 | ), 36 | ( 37 | 1, 38 | "How to make a pie.", 39 | """\ 40 | 1. Make crust 41 | 2. Fill 42 | 3. Bake 43 | 4. 
Eat 44 | """, 45 | "2018-11-23", 46 | ), 47 | ] 48 | 49 | 50 | def get_conn() -> sqlite3.Connection: 51 | conn = sqlite3.connect(db_path) 52 | conn.row_factory = sqlite3.Row 53 | return conn 54 | 55 | 56 | def createdb(): 57 | if db_path.exists(): 58 | raise SystemExit(f"Database at {db_path} already exists.") 59 | conn = get_conn() 60 | print("Inserting users and blogs data.") 61 | with conn: 62 | queries.create_schema(conn) 63 | queries.users.insert_many(conn, users) 64 | queries.blogs.insert_many(conn, blogs) 65 | print("Done!") 66 | conn.close() 67 | 68 | 69 | def deletedb(): 70 | print(f"Deleting the {db_path} file") 71 | if db_path.exists(): 72 | db_path.unlink() 73 | 74 | 75 | def get_users(): 76 | conn = get_conn() 77 | for user in queries.users.get_all(conn): 78 | s = "{" 79 | for k in user.keys(): 80 | s += f"{k}: {user[k]}, " 81 | s = s[:-2] 82 | s += "}" 83 | print(s) 84 | 85 | 86 | def get_user_blogs(username): 87 | conn = get_conn() 88 | user_blogs = queries.blogs.get_user_blogs(conn, username=username) 89 | for user_blog in user_blogs: 90 | print("------") 91 | print(f"{user_blog['title']}") 92 | print(f"by {user_blog['author']} at {user_blog['published']}") 93 | 94 | 95 | if __name__ == "__main__": 96 | parser = argparse.ArgumentParser() 97 | subparsers = parser.add_subparsers() 98 | 99 | createdb_parser = subparsers.add_parser("createdb") 100 | createdb_parser.set_defaults(cmd=createdb) 101 | 102 | deletedb_parser = subparsers.add_parser("deletedb") 103 | deletedb_parser.set_defaults(cmd=deletedb) 104 | 105 | get_users_parser = subparsers.add_parser("get-users") 106 | get_users_parser.set_defaults(cmd=get_users) 107 | 108 | get_user_blogs_parser = subparsers.add_parser("get-user-blogs") 109 | get_user_blogs_parser.add_argument("username") 110 | get_user_blogs_parser.set_defaults(cmd=get_user_blogs) 111 | 112 | args = parser.parse_args() 113 | cmd_kwargs = {k: v for k, v in vars(args).items() if k != "cmd"} 114 | args.cmd(**cmd_kwargs) 115 | -------------------------------------------------------------------------------- /tests/conf_schema.py: -------------------------------------------------------------------------------- 1 | # non portable SQL statements to create, fill and clear the database schema 2 | 3 | import asyncio 4 | from pathlib import Path 5 | import csv 6 | import utils 7 | import datetime 8 | 9 | # 10 | # yuk… hide sync/async 11 | # 12 | # We do not want to replicate schema creation functions for async. 13 | # 14 | # I believe that the asynchronous approach is a poor performance kludge 15 | # against bad interpreter parallelism support (JavaScript, CPython). 16 | # Because the interpreter is so bad at switching between contexts, the model 17 | # just offloads the task to the user for a limited benefit as it only really 18 | # brings improvements to IO-bound loads. 19 | # This interpreter-level implementation induces significant code complexity and 20 | # execution overheads. 21 | # It makes no sense from the hardware and operating system point of view, 22 | # which already have pretty efficient threads running on multicore cpus. 
23 | 24 | def execute_any(conn, queries, name): 25 | utils.log.debug(f"executing: {name}") 26 | f = queries.f(name) 27 | if queries.is_async: 28 | return utils.run_async(f(conn)) 29 | else: 30 | return f(conn) 31 | 32 | def execute_commit(conn, queries): 33 | if queries.is_async: 34 | # transaction management is different with asyncpg… 35 | if queries._driver == "asyncpg": 36 | return 37 | return utils.run_async(conn.commit()) 38 | else: 39 | return conn.commit() 40 | 41 | def execute_many(conn, queries, name, data): 42 | f = queries.f(name) 43 | if queries.is_async: 44 | return utils.run_async(f(conn, data)) 45 | else: 46 | return f(conn, data) 47 | 48 | # CSV data file paths 49 | BLOGDB_PATH = Path(__file__).parent / "blogdb" 50 | USERS_DATA_PATH = BLOGDB_PATH / "data/users_data.csv" 51 | BLOGS_DATA_PATH = BLOGDB_PATH / "data/blogs_data.csv" 52 | 53 | # schema creation 54 | _CREATE_USER_BLOGS = [ 55 | "blogs.create_table_users", 56 | "blogs.create_table_blogs", 57 | ] 58 | 59 | def create_user_blogs(conn, queries): 60 | for q in _CREATE_USER_BLOGS: 61 | execute_any(conn, queries, q) 62 | execute_commit(conn, queries) 63 | # sanity check! 64 | count = execute_any(conn, queries, "users.get_count") 65 | assert count == 0 66 | 67 | # schema data 68 | def fill_user_blogs(conn, queries): 69 | with USERS_DATA_PATH.open() as fp: 70 | users = [ tuple(r) for r in csv.reader(fp) ] 71 | if queries._driver in ("pg8000", "asyncpg"): 72 | users = [ { "name": t[0], "fname": t[1], "lname": t[2] } for t in users ] 73 | execute_many(conn, queries, "users.add_many_users", users) 74 | with BLOGS_DATA_PATH.open() as fp: 75 | blogs = [ tuple(r) for r in csv.reader(fp) ] 76 | if queries._driver in ("pg8000", "asyncpg"): 77 | blogs = [ { "userid": int(t[0]), "title": t[1], "content": t[2], "published": datetime.date.fromisoformat(t[3]) } 78 | for t in blogs ] 79 | execute_many(conn, queries, "blogs.add_many_blogs", blogs) 80 | execute_commit(conn, queries) 81 | 82 | # schema destruction 83 | _DROP_USER_BLOGS = [ 84 | "blogs.drop_table_comments", 85 | "blogs.drop_table_blogs", 86 | "blogs.drop_table_users", 87 | ] 88 | 89 | def drop_user_blogs(conn, queries): 90 | for q in _DROP_USER_BLOGS: 91 | execute_any(conn, queries, q) 92 | execute_commit(conn, queries) 93 | -------------------------------------------------------------------------------- /tests/conf_mysql.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import importlib 3 | import utils as u 4 | import time 5 | from conf_schema import create_user_blogs, fill_user_blogs, drop_user_blogs 6 | 7 | try: 8 | from pytest_mysql import factories 9 | 10 | @pytest.fixture 11 | def my_dsn(request): 12 | """Return connection parameters suitable to target driver.""" 13 | is_detached = request.config.getoption("mysql_detached") 14 | driver = request.config.getoption("mysql_driver") 15 | database = request.config.getoption("mysql_dbname") or "test" 16 | if is_detached: 17 | mp = request.getfixturevalue("mysql_noproc") 18 | dsn = { 19 | "password": request.config.getoption("mysql_passwd"), 20 | } 21 | else: 22 | if not u.has_cmd("mysqld"): 23 | pytest.skip("test needs mysqld") 24 | mp = request.getfixturevalue("mysql_proc") 25 | # u.log.debug(f"mp: {mp}") 26 | # this fixture creates the database as a side effect 27 | conn = request.getfixturevalue("mysql") 28 | # u.log.debug(f"conn: {conn}") 29 | assert mp.running() 30 | assert mp.host == "localhost" 31 | # dsn = {"database": "test"} # if driver == "mysql.connector" 
else {} 32 | # NOTE mysql complains about host even if a unix socket is used:-/ 33 | dsn = {"unix_socket": mp.unixsocket} 34 | # add common connection parameters… although host may be unused 35 | dsn.update(user=mp.user, host=mp.host, port=mp.port, database=database) 36 | # FIXME passwd? 37 | # NOTE pip install mysql-connector-python 38 | # TODO check for mysql.connector version ? 39 | if driver == "mysql.connector": 40 | dsn.update(auth_plugin="mysql_native_password") 41 | u.log.debug(f"my_dsn for {driver}: {dsn}") 42 | yield dsn 43 | 44 | @pytest.fixture 45 | def my_driver(request): 46 | driver = request.config.getoption("mysql_driver") 47 | db = importlib.import_module(driver) 48 | return db 49 | 50 | @pytest.fixture 51 | def my_conn(request, my_dsn, my_driver): 52 | """Return a connection using the expected driver.""" 53 | tries = request.config.getoption("mysql_tries") 54 | fails = 0 55 | while tries > 0: 56 | tries -= 1 57 | try: 58 | with my_driver.connect(**my_dsn) as conn: 59 | tries = 0 60 | yield conn 61 | except Exception as e: 62 | fails += 1 63 | u.log.warning(f"{driver} connection failed ({fails}): {e}") 64 | time.sleep(1.0) 65 | 66 | @pytest.fixture 67 | def my_db(my_conn, queries): 68 | """Build the test database.""" 69 | create_user_blogs(my_conn, queries) 70 | fill_user_blogs(my_conn, queries) 71 | yield my_conn 72 | drop_user_blogs(my_conn, queries) 73 | 74 | except ModuleNotFoundError: 75 | # provide empty fixtures to please pytest "parsing" 76 | 77 | @pytest.fixture 78 | def my_dsn(): 79 | raise Exception("undefined fixture") 80 | 81 | @pytest.fixture 82 | def my_driver(): 83 | raise Exception("undefined fixture") 84 | 85 | @pytest.fixture 86 | def my_conn(): 87 | raise Exception("undefined fixture") 88 | 89 | @pytest.fixture 90 | def my_db(): 91 | raise Exception("undefined fixture") 92 | -------------------------------------------------------------------------------- /docs/source/database-driver-adapters.rst: -------------------------------------------------------------------------------- 1 | Database Driver Adapters 2 | ======================== 3 | 4 | Database driver adapters in aiosql allow extension of the library to support 5 | additional database drivers. 6 | If you are using a driver other than the ones currently supported by built-in 7 | driver adapters (``sqlite3``, ``apsw``, ``aiosqlite``, ``psycopg``, 8 | ``psycopg2``, ``pg8000``, ``pygresql``, ``asyncpg``, ``pymysql``, 9 | ``mysqlclient``, ``mysql-connector``, ``duckdb``, ``pymssql``), 10 | first check whether your driver supports *pyformat* or *named* paramstyles. 11 | If so, check (manually) whether the default PEP 249 drivers work: 12 | 13 | .. code:: python 14 | 15 | import acmedb # your PEP 249 driver 16 | import aiosql 17 | 18 | conn = acmedb.connect("…") 19 | queries = aiosql.from_str("-- name: add42$\nSELECT :n + 42;\n", acmedb) 20 | assert queries.add42(conn, n=18) == 60 21 | 22 | If this simplistic test works, do more tests involving all operators (see the 23 | pytest tests), then create an issue to notify that your driver works out of the 24 | box so it can be advertised from the readme. 25 | 26 | If it does not work or if you have an asynchronous driver, you will need to make 27 | your own. 28 | Good news, it should be very close to the existing supported drivers. 29 | A database driver adapter is a duck-typed class that follows either of the 30 | ``Protocol`` types defined in 31 | `aiosql/types.py `__: 32 | 33 | .. 
literalinclude:: ../../aiosql/types.py 34 | :language: python 35 | :lines: 61-104 36 | :caption: PEP 249 Synchronous Adapter 37 | 38 | .. literalinclude:: ../../aiosql/types.py 39 | :language: python 40 | :lines: 107-152 41 | :caption: Asynchronous Adapter 42 | 43 | Some comments about these classes, one for synchronous queries (PEP 249) and 44 | the other for asynchronous queries: 45 | 46 | - ``_cursor`` is an internal method to generate a cursor, as some drivers 47 | need to pass parameters at this phase. 48 | - ``process_sql`` is used to preprocess SQL queries, so it has to handle named 49 | parameters as they are managed by the target driver. 50 | - ``select``, ``select_one``, ``insert_update_delete``, ``insert_update_delete_many``, 51 | ``insert_returning`` and ``execute_script`` implement all operations. 52 | - ``select_cursor`` returns the raw cursor from a ``select``. 53 | 54 | There isn't much difference between these two protocols besides the 55 | ``async def`` syntax for the method definition. 56 | There is one more sneaky difference: the aiosql code expects async adapters to 57 | have a static class field ``is_aio_driver = True`` so it can tell when to use 58 | ``await`` for method returns. 59 | Looking at the source of the builtin 60 | `adapters/ `__ 61 | is a great place to start seeing how you may write your own database driver adapter. 62 | 63 | For a PEP 249 driver, consider inheriting from ``aiosql.adapters.Generic`` if you can. 64 | 65 | To use the adapter, pass its constructor or factory as the ``driver_adapter`` 66 | argument when building Queries: 67 | 68 | .. code:: python 69 | 70 | queries = aiosql.from_path("foo.sql", driver_adapter=AcmeAdapter) 71 | 72 | Alternatively, an adapter can be registered or overridden: 73 | 74 | .. code:: python 75 | 76 | # in AcmeAdapter provider, eg module "acmedb_aiosql" 77 | import aiosql 78 | aiosql.register_adapter("acmedb", AcmeAdapter) 79 | 80 | # then use it elsewhere 81 | import aiosql 82 | queries = aiosql.from_path("some.sql", "acmedb") 83 | 84 | Please ask questions on `GitHub Issues `__. 85 | If the community makes additional adapter add-ons, they will be listed in the documentation.
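For orientation only, here is a minimal sketch of such an add-on adapter for the
hypothetical ``acmedb`` driver used above, assuming it only understands *pyformat*
placeholders (``%(name)s``). The names are illustrative and not part of aiosql; the
sketch reuses the generic PEP 249 adapter and merely rewrites ``:var_name`` references
at load time, in the same spirit as the built-in DuckDB and PyFormat adapters:

.. code:: python

   # hypothetical module "acmedb_aiosql"; AcmeAdapter and acmedb are illustrative
   from aiosql.adapters.generic import GenericAdapter
   from aiosql.utils import VAR_REF


   def _colon_to_pyformat(match) -> str:
       """Rewrite ':name' to '%(name)s', leaving quoted text untouched."""
       gd = match.groupdict()
       if gd["dquote"] is not None:
           return gd["dquote"]
       if gd["squote"] is not None:
           return gd["squote"]
       return f'{gd["lead"]}%({gd["var_name"]})s'


   class AcmeAdapter(GenericAdapter):
       """Adapt aiosql's named parameters to a pyformat-only driver."""

       def process_sql(self, query_name, op_type, sql):
           return VAR_REF.sub(_colon_to_pyformat, sql)

The inherited PEP 249 methods then execute the rewritten SQL with the named parameters
passed as a dictionary, which is what a *pyformat* driver expects.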
86 | -------------------------------------------------------------------------------- /tests/conf_mssql.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import importlib 3 | import utils as u 4 | import time 5 | from conf_schema import create_user_blogs, fill_user_blogs, drop_user_blogs 6 | 7 | def ms_has_db(conn, database): 8 | with conn.cursor() as cursor: 9 | cursor.execute("SELECT COUNT(*) AS cnt FROM sys.databases WHERE name = %s", (database,)) 10 | return cursor.fetchone() in ({"cnt": 1}, (1,)) 11 | 12 | try: 13 | 14 | # NOTE rhetorical, there is only one ms driver for now 15 | @pytest.fixture(scope="module") 16 | def ms_driver(request): 17 | """Return driver class.""" 18 | driver = request.config.getoption("mssql_driver") or "pymssql" 19 | db = importlib.import_module(driver) 20 | return db 21 | 22 | @pytest.fixture(scope="module") 23 | def ms_dsn(request): 24 | """Return connection parameters suitable to pymssql driver.""" 25 | yield { 26 | # see conftest.py for list of options 27 | "server": request.config.getoption("mssql_server") or '.', 28 | "port": request.config.getoption("mssql_port") or 1433, 29 | "user": request.config.getoption("mssql_user") or "sa", 30 | "password": request.config.getoption("mssql_password"), 31 | "database": request.config.getoption("mssql_database") or "pytest", 32 | "as_dict": True, 33 | "autocommit": False, 34 | } 35 | 36 | @pytest.fixture(scope="module") 37 | def ms_master(ms_dsn): 38 | """Return connection parameters suitable for "system admin" access.""" 39 | dsn = dict(ms_dsn) 40 | dsn["database"] = "master" 41 | dsn["autocommit"] = True 42 | yield dsn 43 | 44 | @pytest.fixture 45 | def ms_conn(request, ms_dsn, ms_driver): 46 | """Return a simple connection using the expected driver.""" 47 | driver, db = ms_driver.__name__, ms_driver 48 | tries = request.config.getoption("mssql_tries") or 3 49 | with u.db_connect(ms_driver, tries, **ms_dsn) as conn: 50 | yield conn 51 | 52 | @pytest.fixture(scope="module") 53 | def ms_db(ms_driver, ms_dsn, ms_master, queries): 54 | """Build the test database and return a connection to that.""" 55 | with ms_driver.connect(**ms_master) as conn: 56 | # initial contents if needed 57 | if not ms_has_db(conn, "pytest"): 58 | with conn.cursor() as cur: 59 | cur.execute("CREATE DATABASE pytest") 60 | cur.execute("USE pytest") 61 | conn.commit() 62 | create_user_blogs(conn, queries) 63 | fill_user_blogs(conn, queries) 64 | else: 65 | u.log.warning("skipping pytest schema creation") 66 | # connection to pytest possibly database created above 67 | with ms_driver.connect(**ms_dsn) as conn: 68 | yield conn 69 | # cleanup 70 | with ms_driver.connect(**ms_master) as conn: 71 | with conn.cursor() as cur: 72 | # u.log.warning("cleaning up pytest schema") 73 | cur.execute("DROP DATABASE pytest") 74 | conn.commit() 75 | 76 | except ModuleNotFoundError: 77 | # provide empty fixtures to please pytest "parsing" 78 | 79 | @pytest.fixture 80 | def ms_driver(): 81 | raise Exception("unimplemented fixture") 82 | 83 | @pytest.fixture 84 | def ms_dsn(): 85 | raise Exception("unimplemented fixture") 86 | 87 | @pytest.fixture 88 | def ms_master(): 89 | raise Exception("unimplemented fixture") 90 | 91 | @pytest.fixture 92 | def ms_conn(): 93 | raise Exception("unimplemented fixture") 94 | 95 | @pytest.fixture 96 | def ms_db(): 97 | raise Exception("unimplemented fixture") 98 | -------------------------------------------------------------------------------- /aiosql/adapters/duckdb.py: 
-------------------------------------------------------------------------------- 1 | from .generic import GenericAdapter 2 | from ..utils import VAR_REF 3 | 4 | 5 | def _colon_to_dollar(ma) -> str: 6 | """Convert 'WHERE :id = 1' to 'WHERE $id = 1'.""" 7 | gd = ma.groupdict() 8 | if gd["dquote"] is not None: 9 | return gd["dquote"] 10 | elif gd["squote"] is not None: 11 | return gd["squote"] 12 | else: 13 | return f'{gd["lead"]}${gd["var_name"]}' 14 | 15 | 16 | class DuckDBAdapter(GenericAdapter): 17 | """DuckDB Adapter""" 18 | 19 | def __init__(self, *args, cursor_as_dict: bool = False, use_cursor: bool = True, **kwargs): 20 | super().__init__(*args, **kwargs) 21 | # whether to converts the default tuple response to a dict. 22 | self._convert_row_to_dict = cursor_as_dict 23 | self._use_cursor = use_cursor 24 | 25 | def _cursor(self, conn): 26 | """Get a cursor from a connection.""" 27 | # For DuckDB cursor is duplicated connection so we don't want to use it 28 | if self._use_cursor: 29 | return conn.cursor(*self._args, **self._kwargs) 30 | return conn 31 | 32 | def process_sql(self, query_name, op_type, sql): 33 | return VAR_REF.sub(_colon_to_dollar, sql) 34 | 35 | def insert_returning(self, conn, query_name, sql, parameters): # pragma: no cover 36 | # very similar to select_one but the returned value 37 | cur = self._cursor(conn) 38 | try: 39 | cur.execute(sql, parameters) 40 | # we have to use fetchall instead of fetchone for now due to this: 41 | # https://github.com/duckdb/duckdb/issues/6008 42 | res = cur.fetchall() 43 | finally: 44 | if self._use_cursor: 45 | cur.close() 46 | if isinstance(res, list): 47 | res = res[0] 48 | return res[0] if res and len(res) == 1 else res 49 | 50 | def select(self, conn, query_name: str, sql: str, parameters, record_class=None): 51 | column_names: list[str] = [] 52 | cur = self._cursor(conn) 53 | try: 54 | cur.execute(sql, parameters) 55 | if record_class is None: 56 | first = True 57 | for row in cur.fetchall(): 58 | if first: # get column names on the fly 59 | column_names = [c[0] for c in cur.description or []] 60 | first = False 61 | if self._convert_row_to_dict: # pragma: no cover 62 | # strict=False: requires 3.10 63 | yield dict(zip(column_names, row)) 64 | else: 65 | yield row 66 | else: # pragma: no cover 67 | first = True 68 | for row in cur.fetchall(): 69 | if first: # only get description on the fly, for apsw 70 | column_names = [c[0] for c in cur.description or []] 71 | first = False 72 | # strict=False: requires 3.10 73 | yield record_class(**dict(zip(column_names, row))) 74 | finally: 75 | if self._use_cursor: 76 | cur.close() 77 | 78 | def select_one(self, conn, query_name, sql, parameters, record_class=None): 79 | cur = self._cursor(conn) 80 | try: 81 | cur.execute(sql, parameters) 82 | result = cur.fetchone() 83 | if result is not None and record_class is not None: # pragma: no cover 84 | column_names = [c[0] for c in cur.description or []] 85 | # strict=False: requires 3.10 86 | result = record_class(**dict(zip(column_names, result))) 87 | elif result is not None and self._convert_row_to_dict: # pragma: no cover 88 | column_names = [c[0] for c in cur.description or []] 89 | result = dict(zip(column_names, result)) 90 | finally: 91 | if self._use_cursor: 92 | cur.close() 93 | return result 94 | -------------------------------------------------------------------------------- /tests/test_patterns.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from aiosql.utils import VAR_REF 3 
| from aiosql.query_loader import _UNCOMMENT, _remove_ml_comments 4 | 5 | pytestmark = [ 6 | pytest.mark.misc, 7 | ] 8 | 9 | 10 | def test_var_pattern_is_quote_aware(): 11 | sql = r""" 12 | select foo_id, 13 | bar_id, 14 | to_char(created_at, 'YYYY-MM-DD"T"HH24:MI:SSOF') 15 | from foos 16 | join bars using(bar_id) 17 | join bazs using(baz_id) 18 | where created_at < :created_at_mark 19 | and foo_mark > :foo_mark 20 | order by created_at desc, source_name asc; 21 | """ 22 | groupdicts = [m.groupdict() for m in VAR_REF.finditer(sql)] 23 | assert len(groupdicts) == 3 24 | 25 | expected = [ 26 | { 27 | "dquote": None, 28 | "lead": None, 29 | "squote": "'YYYY-MM-DD\"T\"HH24:MI:SSOF'", 30 | "var_name": None, 31 | }, 32 | { 33 | "dquote": None, 34 | "lead": " ", 35 | "squote": None, 36 | "var_name": "created_at_mark", 37 | }, 38 | {"dquote": None, "lead": " ", "squote": None, "var_name": "foo_mark"}, 39 | ] 40 | assert groupdicts == expected 41 | 42 | 43 | def test_var_pattern_does_not_require_semicolon_trail(): 44 | """Make sure keywords ending queries are recognized even without 45 | semi-colons. 46 | """ 47 | sql = r""" 48 | select a, 49 | b, 50 | c 51 | FROM foo 52 | WHERE a = :a""" 53 | 54 | groupdicts = [m.groupdict() for m in VAR_REF.finditer(sql)] 55 | assert len(groupdicts) == 1 56 | 57 | expected = {"dquote": None, "lead": " ", "squote": None, "var_name": "a"} 58 | assert groupdicts[0] == expected 59 | 60 | 61 | def test_var_pattern_handles_empty_sql_string_literals(): 62 | """Make sure SQL '' are treated correctly and don't cause a substitution to be skipped.""" 63 | sql = r""" 64 | select blah 65 | from foo 66 | where lower(regexp_replace(blah,'\\W','','g')) = lower(regexp_replace(:blah,'\\W','','g'));""" 67 | 68 | groupdicts = [m.groupdict() for m in VAR_REF.finditer(sql)] 69 | 70 | expected_single_quote_match = { 71 | "dquote": None, 72 | "lead": None, 73 | "squote": "''", 74 | "var_name": None, 75 | } 76 | assert groupdicts[1] == expected_single_quote_match 77 | 78 | expected_var_match = { 79 | "dquote": None, 80 | "lead": "(", 81 | "squote": None, 82 | "var_name": "blah", 83 | } 84 | assert groupdicts[3] == expected_var_match 85 | 86 | 87 | # must remove *only* OK comments 88 | COMMENTED = """ 89 | KO 90 | -- KO 91 | /* OK */ 92 | '/* KO */' 93 | "/* KO */" 94 | ' /* KO 95 | */' 96 | " /* KO 97 | */" 98 | /* 99 | * OK 100 | */ 101 | -- /* KO 102 | -- KO */ 103 | /* OK 104 | -- OK 105 | ' OK ' "OK " 106 | */ 107 | KO 108 | /* OK */ -- KO 'KO' 109 | -- KO */ 110 | /*+ KO (hints must be kept!) 
*/ 111 | """ 112 | 113 | 114 | def test_comments(): 115 | n = 0 116 | for ma in _UNCOMMENT.finditer(COMMENTED): 117 | matches = ma.groupdict() 118 | s, d, c, m = matches["squote"], matches["dquote"], matches["oneline"], matches["multiline"] 119 | # assert s or d or c or m, f"bad match: {m} {matches}" 120 | if s or d or c or m: 121 | n += 1 122 | if m: 123 | assert "OK" in m and "KO" not in m 124 | if s: 125 | assert "KO" in s and "OK" not in s 126 | if d: 127 | assert "KO" in d and "OK" not in d 128 | if c: 129 | assert "KO" in c and "OK" not in c 130 | assert n == 13 131 | 132 | 133 | COMMENT_UNCOMMENT = [ 134 | ("", ""), 135 | ("hello", "hello"), 136 | ("world!\n", "world!\n"), 137 | ("/**/", ""), 138 | ("/*+ hint */", "/*+ hint */"), 139 | ("x/*\n*/y\n", "xy\n"), 140 | ("-- /* */\n", "-- /* */\n"), 141 | ("-- /* */", "-- /* */"), 142 | ("'/* */'", "'/* */'"), 143 | ("--\n/* */X\n", "--\nX\n"), 144 | ] 145 | 146 | 147 | def test_uncomment(): 148 | n = 0 149 | for c, u in COMMENT_UNCOMMENT: 150 | n += 1 151 | assert _remove_ml_comments(c) == u 152 | assert n == len(COMMENT_UNCOMMENT) 153 | -------------------------------------------------------------------------------- /tests/test_asyncpg.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import pytest 3 | import aiosql 4 | import run_tests as t 5 | import utils as u 6 | 7 | try: 8 | import asyncpg 9 | import pytest_asyncio 10 | except ModuleNotFoundError as m: 11 | pytest.skip(f"missing module: {m}", allow_module_level=True) 12 | 13 | pytestmark = [ 14 | pytest.mark.postgres, 15 | # pytest.mark.asyncio, 16 | pytest.mark.skipif(not u.has_pkg("pytest_postgresql"), reason="no pytest_postgresql"), 17 | pytest.mark.skipif(not u.has_pkg("pytest_asyncio"), reason="no pytest_asyncio"), 18 | ] 19 | 20 | @pytest.fixture(scope="module") 21 | def driver(): 22 | return "asyncpg" 23 | 24 | @pytest.fixture(scope="module") 25 | def date(): 26 | return datetime.date 27 | 28 | @pytest_asyncio.fixture 29 | async def rconn(pg_dsn): 30 | conn = await asyncpg.connect(pg_dsn) 31 | yield conn 32 | await conn.close() 33 | 34 | @pytest_asyncio.fixture 35 | async def aconn(pg_db): 36 | yield pg_db 37 | 38 | @pytest_asyncio.fixture 39 | async def dconn(aconn): 40 | # FIXME dict row? 41 | yield aconn 42 | 43 | from run_tests import ( 44 | run_async_sanity as test_async_sanity, 45 | run_async_record_query as test_async_record_query, 46 | run_async_parameterized_query as test_async_parameterized_query, 47 | run_async_parameterized_record_query as test_async_parameterized_record_query, 48 | run_async_record_class_query as test_async_record_class_query, 49 | run_async_select_cursor_context_manager as test_async_select_cursor_context_manager, 50 | run_async_select_one as test_async_select_one, 51 | run_async_select_value as test_async_select_value, 52 | run_async_insert_returning as test_async_insert_returning, 53 | run_async_delete as test_async_delete, 54 | run_async_insert_many as test_async_insert_many, 55 | run_async_execute_script as test_async_execute_script, 56 | ) 57 | 58 | # TODO other pools? 
59 | @pytest.mark.asyncio 60 | async def test_with_pool(pg_dsn, queries, pg_db): 61 | async with asyncpg.create_pool(pg_dsn) as pool: 62 | async with pool.acquire() as conn: 63 | await t.run_async_insert_returning(conn, queries, datetime.date) 64 | 65 | @pytest.mark.asyncio 66 | async def test_async_methods(pg_dsn, queries, pg_db): 67 | async with asyncpg.create_pool(pg_dsn) as pool: 68 | await t.run_async_methods(pool, queries) 69 | 70 | @pytest.mark.asyncio 71 | async def test_no_publish(aconn, queries): 72 | # TODO move in run 73 | no_publish = queries.f("blogs.no_publish") 74 | res = await no_publish(aconn) 75 | assert res is None 76 | 77 | def test_many_replacements(pg_dsn, queries): 78 | """If the replacement was longer than the variable, bad SQL was generated. 79 | 80 | The variable replacement code had a bug that caused it to miscalculate where in the 81 | original string to put the placeholders. The SQL below would produce a query that 82 | ended with "$8, $9, $10$11:k);" because of this bug. 83 | 84 | This test would fail before the bug was fixed and passes afterward. 85 | 86 | This issue was reported in https://github.com/nackjicholson/aiosql/issues/90. 87 | """ 88 | 89 | sql = """ 90 | -- name: test Any: ... # pragma: no cover 53 | 54 | 55 | # Can't make this a recursive type in terms of itself 56 | # QueryDataTree = dict[str, QueryDatum|"QueryDataTree"] 57 | QueryDataTree = dict[str, QueryDatum|dict] 58 | 59 | class SyncDriverAdapterProtocol(Protocol): 60 | 61 | def process_sql( 62 | self, query_name: str, op_type: SQLOperationType, sql: str 63 | ) -> str: ... # pragma: no cover 64 | 65 | def select( 66 | self, 67 | conn: Any, 68 | query_name: str, 69 | sql: str, 70 | parameters: ParamType, 71 | record_class: Callable|None, 72 | ) -> Generator[Any, None, None]: ... # pragma: no cover 73 | 74 | def select_one( 75 | self, 76 | conn: Any, 77 | query_name: str, 78 | sql: str, 79 | parameters: ParamType, 80 | record_class: Callable|None, 81 | ) -> tuple[Any, ...]|None: ... # pragma: no cover 82 | 83 | def select_value( 84 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 85 | ) -> Any|None: ... # pragma: no cover 86 | 87 | def select_cursor( 88 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 89 | ) -> ContextManager[Any]: ... # pragma: no cover 90 | 91 | def insert_update_delete( 92 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 93 | ) -> int: ... # pragma: no cover 94 | 95 | def insert_update_delete_many( 96 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 97 | ) -> int: ... # pragma: no cover 98 | 99 | def insert_returning( 100 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 101 | ) -> Any|None: ... # pragma: no cover 102 | 103 | def execute_script(self, conn: Any, sql: str) -> str: ... # pragma: no cover 104 | 105 | 106 | class AsyncDriverAdapterProtocol(Protocol): 107 | 108 | def process_sql( 109 | self, query_name: str, op_type: SQLOperationType, sql: str 110 | ) -> str: ... # pragma: no cover 111 | 112 | def select( 113 | self, 114 | conn: Any, 115 | query_name: str, 116 | sql: str, 117 | parameters: ParamType, 118 | record_class: Callable|None, 119 | ) -> collections.abc.AsyncGenerator[Any, None]: ... # pragma: no cover 120 | 121 | async def select_one( 122 | self, 123 | conn: Any, 124 | query_name: str, 125 | sql: str, 126 | parameters: ParamType, 127 | record_class: Callable|None, 128 | ) -> Any|None: ... 
# pragma: no cover 129 | 130 | async def select_value( 131 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 132 | ) -> Any|None: ... # pragma: no cover 133 | 134 | def select_cursor( 135 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 136 | ) -> AsyncContextManager[Any]: ... # pragma: no cover 137 | 138 | # TODO: Next major version introduce a return? Optional return? 139 | async def insert_update_delete( 140 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 141 | ) -> None: ... # pragma: no cover 142 | 143 | # TODO: Next major version introduce a return? Optional return? 144 | async def insert_update_delete_many( 145 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 146 | ) -> None: ... # pragma: no cover 147 | 148 | async def insert_returning( 149 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 150 | ) -> Any|None: ... # pragma: no cover 151 | 152 | async def execute_script(self, conn: Any, sql: str) -> str: ... # pragma: no cover 153 | 154 | 155 | DriverAdapterProtocol = SyncDriverAdapterProtocol|AsyncDriverAdapterProtocol 156 | -------------------------------------------------------------------------------- /aiosql/adapters/asyncpg.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | from contextlib import asynccontextmanager 3 | 4 | from ..utils import VAR_REF 5 | 6 | 7 | class MaybeAcquire: 8 | def __init__(self, client, driver=None): 9 | self.client = client 10 | self._driver = driver 11 | 12 | async def __aenter__(self): 13 | if "acquire" in dir(self.client): 14 | self._managed_conn = await self.client.acquire() 15 | return self._managed_conn 16 | else: 17 | self._managed_conn = None 18 | return self.client 19 | 20 | async def __aexit__(self, exc_type, exc, tb): 21 | if self._managed_conn is not None: 22 | await self.client.release(self._managed_conn) 23 | 24 | 25 | class AsyncPGAdapter: 26 | is_aio_driver = True 27 | 28 | def __init__(self): 29 | self.var_sorted = defaultdict(list) 30 | 31 | def process_sql(self, query_name, _op_type, sql): 32 | """asyncpg seems to only support numeric.""" 33 | adj = 0 34 | 35 | for match in VAR_REF.finditer(sql): 36 | gd = match.groupdict() 37 | # Do nothing if the match is found within quotes. 38 | if gd["dquote"] is not None or gd["squote"] is not None: 39 | continue 40 | 41 | var_name = gd["var_name"] 42 | if var_name in self.var_sorted[query_name]: 43 | replacement = f"${self.var_sorted[query_name].index(var_name) + 1}" 44 | else: 45 | replacement = f"${len(self.var_sorted[query_name]) + 1}" 46 | self.var_sorted[query_name].append(var_name) 47 | 48 | # Determine the offset of the start and end of the original 49 | # variable that we are replacing, taking into account an adjustment 50 | # factor based on previous replacements (see the note below). 51 | start = match.start() + len(gd["lead"]) + adj 52 | end = match.end() + adj 53 | 54 | sql = sql[:start] + replacement + sql[end:] 55 | 56 | # If the replacement and original variable were different lengths, 57 | # then the offsets of subsequent matches will be wrong by the 58 | # difference. Calculate an adjustment to apply to reconcile those 59 | # offsets with the modified string. 60 | # 61 | # The "- 1" is to account for the leading ":" character in the 62 | # original string. 
63 | adj += len(replacement) - len(var_name) - 1 64 | 65 | return sql 66 | 67 | def maybe_order_params(self, query_name, parameters): 68 | if isinstance(parameters, dict): 69 | return [parameters[rk] for rk in self.var_sorted[query_name]] 70 | elif isinstance(parameters, tuple): 71 | return parameters 72 | else: 73 | raise ValueError(f"Parameters expected to be dict or tuple, received {parameters}") 74 | 75 | async def select(self, conn, query_name, sql, parameters, record_class=None): 76 | parameters = self.maybe_order_params(query_name, parameters) 77 | async with MaybeAcquire(conn) as connection: 78 | results = await connection.fetch(sql, *parameters) 79 | if record_class is not None: 80 | for rec in results: 81 | yield record_class(**dict(rec)) 82 | else: 83 | for rec in results: 84 | yield rec 85 | 86 | async def select_one(self, conn, query_name, sql, parameters, record_class=None): 87 | parameters = self.maybe_order_params(query_name, parameters) 88 | async with MaybeAcquire(conn) as connection: 89 | result = await connection.fetchrow(sql, *parameters) 90 | if result is not None and record_class is not None: 91 | result = record_class(**dict(result)) 92 | return result 93 | 94 | async def select_value(self, conn, query_name, sql, parameters): 95 | parameters = self.maybe_order_params(query_name, parameters) 96 | async with MaybeAcquire(conn) as connection: 97 | return await connection.fetchval(sql, *parameters) 98 | 99 | @asynccontextmanager 100 | async def select_cursor(self, conn, query_name, sql, parameters): 101 | parameters = self.maybe_order_params(query_name, parameters) 102 | async with MaybeAcquire(conn) as connection: 103 | stmt = await connection.prepare(sql) 104 | async with connection.transaction(): 105 | yield stmt.cursor(*parameters) 106 | 107 | async def insert_returning(self, conn, query_name, sql, parameters): 108 | parameters = self.maybe_order_params(query_name, parameters) 109 | async with MaybeAcquire(conn) as connection: 110 | res = await connection.fetchrow(sql, *parameters) 111 | if res: 112 | return res[0] if len(res) == 1 else res 113 | else: 114 | return None 115 | 116 | async def insert_update_delete(self, conn, query_name, sql, parameters): 117 | parameters = self.maybe_order_params(query_name, parameters) 118 | async with MaybeAcquire(conn) as connection: 119 | # TODO extract integer last result 120 | return await connection.execute(sql, *parameters) 121 | 122 | async def insert_update_delete_many(self, conn, query_name, sql, parameters): 123 | parameters = [self.maybe_order_params(query_name, params) for params in parameters] 124 | async with MaybeAcquire(conn) as connection: 125 | return await connection.executemany(sql, parameters) 126 | 127 | async def execute_script(self, conn, sql): 128 | async with MaybeAcquire(conn) as connection: 129 | return await connection.execute(sql) 130 | -------------------------------------------------------------------------------- /aiosql/aiosql.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import Callable, Type, Any 3 | 4 | from .adapters.aiosqlite import AioSQLiteAdapter 5 | from .adapters.asyncpg import AsyncPGAdapter 6 | from .adapters.pyformat import PyFormatAdapter 7 | from .adapters.mysql import BrokenMySQLAdapter 8 | from .adapters.generic import GenericAdapter 9 | from .adapters.apyformat import AsyncPyFormatAdapter 10 | from .adapters.sqlite3 import SQLite3Adapter 11 | from .adapters.pg8000 import Pg8000Adapter 12 | from 
.adapters.duckdb import DuckDBAdapter 13 | from .utils import SQLLoadException, log 14 | from .queries import Queries 15 | from .query_loader import QueryLoader 16 | from .types import DriverAdapterProtocol 17 | 18 | _ADAPTERS: dict[str, Callable[..., DriverAdapterProtocol]] = { 19 | "aiosqlite": AioSQLiteAdapter, # type: ignore 20 | "apsw": GenericAdapter, 21 | "apsycopg": AsyncPyFormatAdapter, # type: ignore 22 | "asyncpg": AsyncPGAdapter, # type: ignore 23 | "duckdb": DuckDBAdapter, 24 | "mariadb": BrokenMySQLAdapter, 25 | "mysqldb": BrokenMySQLAdapter, 26 | "mysql-connector": PyFormatAdapter, 27 | "pg8000": Pg8000Adapter, 28 | "psycopg": PyFormatAdapter, 29 | "psycopg2": PyFormatAdapter, 30 | "pygresql": PyFormatAdapter, 31 | "pymssql": PyFormatAdapter, 32 | "pymysql": BrokenMySQLAdapter, 33 | "sqlite3": SQLite3Adapter, 34 | } 35 | """Map adapter names to their adapter class.""" 36 | 37 | 38 | def register_adapter(name: str, adapter: Callable[..., DriverAdapterProtocol]): 39 | """Register or override an adapter.""" 40 | if name.lower() in _ADAPTERS: 41 | log.debug(f"overriding aiosql adapter {name}") 42 | _ADAPTERS[name.lower()] = adapter 43 | 44 | 45 | def _make_driver_adapter( 46 | driver_adapter: str|Callable[..., DriverAdapterProtocol], 47 | *args, ** kwargs 48 | ) -> DriverAdapterProtocol: 49 | """Get the driver adapter instance registered by the `driver_name`.""" 50 | if isinstance(driver_adapter, str): 51 | try: 52 | adapter = _ADAPTERS[driver_adapter.lower()] 53 | except KeyError: 54 | raise ValueError(f"Encountered unregistered driver_adapter: {driver_adapter}") 55 | # try some guessing if it is a PEP249 module 56 | elif hasattr(driver_adapter, "paramstyle"): 57 | style = getattr(driver_adapter, "paramstyle") # avoid mypy warning? 58 | if style == "pyformat": 59 | adapter = PyFormatAdapter # type: ignore 60 | elif style == "named": 61 | adapter = GenericAdapter # type: ignore 62 | else: 63 | raise ValueError(f"Unexpected driver: {driver_adapter} ({style})") 64 | # so, can we just call it? 65 | elif callable(driver_adapter): # pragma: no cover 66 | adapter = driver_adapter 67 | else: 68 | raise ValueError(f"Unexpected driver_adapter: {driver_adapter}") 69 | 70 | return adapter(*args, **kwargs) 71 | 72 | 73 | def from_str( 74 | sql: str, 75 | driver_adapter: str|Callable[..., DriverAdapterProtocol], 76 | record_classes: dict|None = None, 77 | kwargs_only: bool = True, 78 | attribute: str|None = "__", 79 | args: list[Any] = [], 80 | kwargs: dict[str, Any] = {}, 81 | loader_cls: Type[QueryLoader] = QueryLoader, 82 | queries_cls: Type[Queries] = Queries, 83 | ): 84 | """Load queries from a SQL string. 85 | 86 | **Parameters:** 87 | 88 | - **sql** - A string containing SQL statements and aiosql name. 89 | - **driver_adapter** - Either a string to designate one of the aiosql built-in database driver 90 | adapters. One of many available for SQLite, Postgres and MySQL. If you have defined your 91 | own adapter class, you can pass it's constructor. 92 | - **kwargs_only** - *(optional)* whether to only use named parameters on query execution, default is *True*. 93 | - **attribute** - *(optional)* ``.`` attribute access substitution, defaults to ``"__"``, *None* disables 94 | the feature. 95 | - **args** - *(optional)* adapter creation args (list), forwarded to cursor creation by default. 96 | - **kwargs** - *(optional)* adapter creation args (dict), forwarded to cursor creation by default. 
97 | - **record_classes** - *(optional)* **DEPRECATED** Mapping of strings used in "record_class" 98 | declarations to the python classes which aiosql should use when marshaling SQL results. 99 | - **loader_cls** - *(optional)* Custom constructor for QueryLoader extensions. 100 | - **queries_cls** - *(optional)* Custom constructor for Queries extensions. 101 | 102 | **Returns:** ``Queries`` 103 | 104 | Usage: 105 | 106 | Loading queries from a SQL string. 107 | 108 | .. code-block:: python 109 | 110 | import sqlite3 111 | import aiosql 112 | 113 | sql_text = \"\"\" 114 | -- name: get-all-greetings 115 | -- Get all the greetings in the database 116 | select * from greetings; 117 | 118 | -- name: get-user-by-username^ 119 | -- Get all the users from the database, 120 | -- and return it as a dict 121 | select * from users where username = :username; 122 | \"\"\" 123 | 124 | queries = aiosql.from_str(sql_text, "sqlite3") 125 | queries.get_all_greetings(conn) 126 | queries.get_user_by_username(conn, username="willvaughn") 127 | """ 128 | adapter = _make_driver_adapter(driver_adapter, *args, **kwargs) 129 | query_loader = loader_cls(adapter, record_classes, attribute=attribute) 130 | query_data = query_loader.load_query_data_from_sql(sql, []) 131 | return queries_cls(adapter, kwargs_only=kwargs_only).load_from_list(query_data) 132 | 133 | 134 | def from_path( 135 | sql_path: str|Path, 136 | driver_adapter: str|Callable[..., DriverAdapterProtocol], 137 | record_classes: dict|None = None, 138 | kwargs_only: bool = True, 139 | attribute: str|None = "__", 140 | args: list[Any] = [], 141 | kwargs: dict[str, Any] = {}, 142 | loader_cls: Type[QueryLoader] = QueryLoader, 143 | queries_cls: Type[Queries] = Queries, 144 | ext: tuple[str] = (".sql",), 145 | encoding=None, 146 | ): 147 | """Load queries from a `.sql` file, or directory of `.sql` files. 148 | 149 | **Parameters:** 150 | 151 | - **sql_path** - Path to a `.sql` file or directory containing `.sql` files. 152 | - **driver_adapter** - Either a string to designate one of the aiosql built-in database driver 153 | adapters. One of many available for SQLite, Postgres and MySQL. If you have defined your own 154 | adapter class, you may pass its constructor. 155 | - **record_classes** - *(optional)* **DEPRECATED** Mapping of strings used in "record_class" 156 | - **kwargs_only** - *(optional)* Whether to only use named parameters on query execution, default is *True*. 157 | - **attribute** - *(optional)* ``.`` attribute access substitution, defaults to ``"__""``, *None* disables 158 | the feature. 159 | - **args** - *(optional)* adapter creation args (list), forwarded to cursor creation by default. 160 | - **kwargs** - *(optional)* adapter creation args (dict), forwarded to cursor creation by default. 161 | declarations to the python classes which aiosql should use when marshaling SQL results. 162 | - **loader_cls** - *(optional)* Custom constructor for `QueryLoader` extensions. 163 | - **queries_cls** - *(optional)* Custom constructor for `Queries` extensions. 164 | - **ext** - *(optional)* allowed file extensions for query files, default is `(".sql",)`. 165 | - **encoding** - *(optional)* encoding for reading files. 166 | 167 | **Returns:** `Queries` 168 | 169 | Usage: 170 | 171 | .. 
code-block:: python 172 | 173 | queries = aiosql.from_path("./sql", "psycopg2") 174 | queries = aiosql.from_path("./sql", MyDBAdapter) 175 | """ 176 | path = Path(sql_path) 177 | 178 | if not path.exists(): 179 | raise SQLLoadException(f"File does not exist: {path}") 180 | 181 | adapter = _make_driver_adapter(driver_adapter, *args, **kwargs) 182 | query_loader = loader_cls(adapter, record_classes, attribute=attribute) 183 | 184 | if path.is_file(): 185 | query_data = query_loader.load_query_data_from_file(path, encoding=encoding) 186 | return queries_cls(adapter, kwargs_only=kwargs_only).load_from_list(query_data) 187 | elif path.is_dir(): 188 | query_data_tree = query_loader.load_query_data_from_dir_path( 189 | path, ext=ext, encoding=encoding 190 | ) 191 | return queries_cls(adapter, kwargs_only=kwargs_only).load_from_tree(query_data_tree) 192 | else: # pragma: no cover 193 | raise SQLLoadException(f"The sql_path must be a directory or file, got {sql_path}") 194 | -------------------------------------------------------------------------------- /docs/source/defining-sql-queries.rst: -------------------------------------------------------------------------------- 1 | Defining SQL Queries 2 | ==================== 3 | 4 | Query Names 5 | ----------- 6 | 7 | Name definitions are how aiosql determines the name of the methods that SQL 8 | code blocks are accessible by. 9 | A query name is defined by a SQL comment of the form ``"-- name: "``. 10 | As a readability convenience, dash characters (``-``) in the name are turned 11 | into underlines (``_``). 12 | 13 | .. literalinclude:: ../../tests/blogdb/sql/blogs/blogs.sql 14 | :language: sql 15 | :lines: 14,16 16 | 17 | This query will be available in aiosql under the python method name ``.get_all_blogs(conn)`` 18 | 19 | Query Comments 20 | -------------- 21 | 22 | .. literalinclude:: ../../tests/blogdb/sql/blogs/blogs.sql 23 | :language: sql 24 | :lines: 14-16 25 | 26 | Any other SQL comments you make between the name definition and your code will 27 | be used a the python documentation string for the generated method. 28 | You can use ``help()`` in the Python REPL to view these comments while using python. 29 | 30 | .. 31 | FIXME method parameters are not shown… 32 | 33 | .. code:: pycon 34 | 35 | Python 3 … on Linux 36 | Type "help", "copyright", "credits" or "license" for more information. 37 | >>> import aiosql 38 | >>> queries = aiosql.from_path("blogs.sql", "sqlite3") 39 | >>> help(queries.get_all_blogs) 40 | Help on method get_all_blogs in module aiosql.queries: 41 | 42 | get_all_blogs(conn, *args, **kwargs) method of aiosql.queries.Queries instance 43 | Fetch all fields for every blog in the database. 44 | 45 | Named Parameters 46 | ---------------- 47 | 48 | Named parameters ``:param`` are accepted by all supported drivers and taken 49 | from Python named parameters passed to the query. 50 | In addition, simple attributes can be referenced with the ``.``-syntax. 51 | 52 | .. literalinclude:: ../../tests/blogdb/sql/blogs/blogs.sql 53 | :language: sql 54 | :lines: 61-62 55 | 56 | Then the generated function expects two named parameters: 57 | 58 | .. code:: python 59 | 60 | res = queries.with_params(name="Calvin", x=(1+1j)) 61 | # => (6, 2.0) 62 | 63 | Parameter Declarations 64 | ---------------------- 65 | 66 | Query parameter names may be declared in parentheses just after the method name. 67 | 68 | .. 
literalinclude:: ../../tests/blogdb/sql/blogs/blogs.sql 69 | :language: sql 70 | :lines: 55,56 71 | 72 | When declared they are checked, raising errors when parameters are unused or undeclared. 73 | 74 | Operators 75 | --------- 76 | 77 | This section describes the usage of various query operator symbols that you can 78 | annotate query names with in order to direct how aiosql will execute and return 79 | results. 80 | 81 | No Operator (Default) 82 | ~~~~~~~~~~~~~~~~~~~~~ 83 | 84 | In the above `Query Names <#query-names>`__ section the ``get-all-blogs`` 85 | name is written without any trailing operators. 86 | 87 | .. literalinclude:: ../../tests/blogdb/sql/blogs/blogs.sql 88 | :language: sql 89 | :lines: 14 90 | 91 | The lack of an explicit operator tells aiosql to execute the query and 92 | to return **all** the results. 93 | In the case of ``get-all-blogs`` that means a ``select`` statement will be 94 | executed and all the resulting rows will be returned. 95 | When writing your application you will often need to perform other operations 96 | besides ``select``, like ``insert``, ``delete``, and perhaps bulk operations. 97 | The operators detailed in the next sections let you declare in your SQL code 98 | how that query should be executed by a Python database driver. 99 | 100 | ``^`` Select One 101 | ~~~~~~~~~~~~~~~~ 102 | 103 | The ``^`` operator executes a query and returns the **first row** of a result set. 104 | When there are no rows in the result set it returns ``None``. 105 | This is useful when you know there should be one, and exactly one result from your query. 106 | 107 | As an example, if you have a unique constraint on the ``username`` field in your 108 | ``users`` table which makes it impossible for two users to share the same username, 109 | you could use ``^`` to direct aiosql to select a single user rather than a list of 110 | rows of length 1. 111 | 112 | .. literalinclude:: ../../tests/blogdb/sql/users/users.sql 113 | :language: sql 114 | :lines: 8-14 115 | 116 | When used from Python this query will either return ``None`` or the singular selected row. 117 | 118 | .. code:: python 119 | 120 | queries.get_user_by_username(conn, username="willvaughn") 121 | # => (1, "willvaughn", "William Vaughn") or None 122 | 123 | ``$`` Select Value 124 | ~~~~~~~~~~~~~~~~~~ 125 | 126 | The ``$`` operator will execute the query, and only return the **first value of the first row** 127 | of a result set. If there are no rows in the result set it returns ``None``. 128 | This is implemented by returing the first element of the tuple returned by ``cur.fetchone()`` 129 | from the underlying driver. 130 | This is mostly useful for queries returning IDs, COUNTs or other aggregates. 131 | 132 | .. literalinclude:: ../../tests/blogdb/sql/users/users.sql 133 | :language: sql 134 | :lines: 30,32 135 | 136 | When used from Python: 137 | 138 | .. code:: python 139 | 140 | queries.get_count(conn) 141 | # => 3 or None 142 | 143 | ``!`` Insert/Update/Delete 144 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ 145 | 146 | The ``!`` operator executes SQL without returning any results. 147 | It is meant for statements that use ``insert``, ``update``, and ``delete`` to make 148 | modifications to database rows without a necessary return value. 149 | 150 | .. literalinclude:: ../../tests/blogdb/sql/blogs/blogs.sql 151 | :language: sql 152 | :lines: 64-66,32,34 153 | 154 | The methods generated are: 155 | 156 | .. 
code:: python 157 | 
158 | def new_blog(conn, userid: int, title: str, content: str) -> int:
159 | pass
160 | 
161 | def remove_blog(conn, blogid: int) -> int:
162 | pass
163 | 
164 | Each can be called to alter the database, and returns the number of affected rows
165 | if available.
166 | 
167 | Note that some SQL databases allow returning a relation after ``insert``,
168 | ``update`` or ``delete`` by using a ``returning`` clause.
169 | For such queries the result is a relation like a ``select``, so the same operators
170 | apply:
171 | 
172 | .. literalinclude:: ../../tests/blogdb/sql/blogs/blogs.sql
173 | :language: sql
174 | :lines: 68-71
175 | 
176 | .. code:: python
177 | 
178 | blogid = queries.publish_new_blog(conn, userid=1, title="AioSQL New Features", content="…")
179 | 
180 | ``<!`` Insert Returning
181 | ~~~~~~~~~~~~~~~~~~~~~~~
182 | 
183 | The ``<!`` operator executes an ``insert`` and returns the id of the inserted row when the
184 | driver makes one available (e.g. ``cur.lastrowid`` with ``sqlite3``), for databases which
185 | historically lacked a ``returning`` clause.
186 | 
190 | 
191 | As recent versions of SQLite do support the ``returning`` clause, you can simply forget
192 | about this operator, use the clause explicitly and treat the whole command as a standard
193 | select with the *empty* operator (relation), or ``^`` (tuple), or ``$`` (scalar).
194 | 
195 | .. literalinclude:: ../../tests/blogdb/sql/blogs/li/blogs.sql
196 | :language: sql
197 | :lines: 39-41
198 | 
199 | Executing this query in python will return the ``blogid`` of the inserted row.
200 | 
201 | .. code:: python
202 | 
203 | blogid = queries.publish_a_blog(conn, userid=1, title="Hi", content="blah blah.")
204 | 
205 | ``*!`` Insert/Update/Delete Many
206 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
207 | 
208 | The ``*!`` operator directs aiosql to execute a SQL statement over all items of a given sequence.
209 | Under the hood this calls the ``executemany`` method of many database drivers.
210 | See `sqlite3 Cursor.executemany `__
211 | for an example.
212 | 
213 | In aiosql we can use this for a bulk publish method that operates over a list of blog entries.
214 | 
215 | .. literalinclude:: ../../tests/blogdb/sql/blogs/pg/blogs.sql
216 | :language: sql
217 | :lines: 50-53
218 | 
219 | .. code:: python
220 | 
221 | queries = aiosql.from_path("blogs.sql", "psycopg2")
222 | blogs = [
223 | {"userid": 1, "title": "First Blog", "content": "...", "published": datetime(2018, 1, 1)},
224 | {"userid": 1, "title": "Next Blog", "content": "...", "published": datetime(2018, 1, 2)},
225 | {"userid": 2, "title": "Hey, Hey!", "content": "...", "published": datetime(2018, 7, 28)},
226 | ]
227 | queries.bulk_publish(conn, blogs)
228 | 
229 | The method returns the number of affected rows, if available.
230 | 
231 | ``#`` Execute Scripts
232 | ~~~~~~~~~~~~~~~~~~~~~
233 | 
234 | Using this operator will execute SQL statements as a script.
235 | You can't do variable substitution with the ``#`` operator.
236 | An example use case is running data definition statements like ``create table`` in order to set up a database.
237 | 
238 | .. literalinclude:: ../../tests/blogdb/sql/blogs/pg/blogs.sql
239 | :language: sql
240 | :lines: 9-16
241 | 
242 | .. code:: python
243 | 
244 | queries = aiosql.from_path("create_schema.sql", "sqlite3")
245 | queries.create_table_blogs(conn)
246 | 
247 | Note: SQL scripts do not accept parameters.
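
As a recap, the following minimal sketch combines several of the operators above using ``sqlite3``.
The ``notes`` table and the query names (``create_notes``, ``add_note``, ``note_count``, ``get_note``)
are illustrative only and are not part of the aiosql test data.

.. code:: python

    import sqlite3
    import aiosql

    sql = """
    -- name: create_notes#
    create table if not exists notes(noteid integer primary key, note text);

    -- name: add_note(note)!
    insert into notes(note) values (:note);

    -- name: note_count()$
    select count(*) from notes;

    -- name: get_note(noteid)^
    select noteid, note from notes where noteid = :noteid;
    """

    queries = aiosql.from_str(sql, "sqlite3")
    conn = sqlite3.connect(":memory:")

    queries.create_notes(conn)                     # "#": run as a script
    queries.add_note(conn, note="hello")           # "!": returns the number of affected rows
    assert queries.note_count(conn) == 1           # "$": first value of the first row
    assert queries.get_note(conn, noteid=1) == (1, "hello")  # "^": first row, or None
    conn.commit()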
248 | -------------------------------------------------------------------------------- /docs/source/versions.rst: -------------------------------------------------------------------------------- 1 | AioSQL - Versions 2 | ================= 3 | 4 | 14.1 on 2025-11-27 5 | ------------------ 6 | 7 | - minor documentation fix 8 | 9 | 14.0 on 2025-11-21 10 | ------------------ 11 | 12 | - improved documentation 13 | - minor packaging fix 14 | - only right strip queries to improve traces readability 15 | - remove Python 3.9 support 16 | - modernize Python type declarations 17 | - make asynchronous select return an asynchronous generator 18 | - add `apsycopg` driver for asynchronous psycopg3 19 | - update GitHub CI configuration 20 | 21 | 13.4 on 2025-04-09 22 | ------------------ 23 | 24 | - update GitHub CI configuration. 25 | - use SPDX format for licensing informations and add topics. 26 | - doc, separate backlog from versions. 27 | 28 | 13.3 on 2025-03-07 29 | ------------------ 30 | 31 | - rework dependencies. 32 | - enable *PyPy 3.11*, *Python 3.13t* and *Python 3.14* in GitHub CI. 33 | 34 | 13.2 on 2025-01-29 35 | ------------------ 36 | 37 | - improve empty query handling. 38 | - update documentation. 39 | 40 | 13.1 on 2025-01-23 41 | ------------------ 42 | 43 | - fix warning repetition and display for missing `!` on non-SELECT. 44 | - improve documentation with declared parameters in examples. 45 | - homogeneise test consistency wrt attribute and parameter names. 46 | - fix doc typos. 47 | 48 | 13.0 on 2024-11-10 49 | ------------------ 50 | 51 | - change `kwargs_only` parameter default value to _True_. **Compatibility break.** 52 | - add optional parameter declarations to queries, and check them when provided. 53 | - forbid positional parameters when named parameters are declared. 54 | - warn on probable missing operation. 55 | - silent some test warnings. 56 | - add *psycopg2* back to CI with Python 3.13. 57 | - improve documentation. 58 | - improve Makefile. 59 | 60 | 12.2 on 2024-10-02 61 | ------------------ 62 | 63 | - fix included source lines in documentation. 64 | 65 | 12.1 on 2024-10-01 66 | ------------------ 67 | 68 | - drop support for *Python 3.8*. 69 | - enable *DuckDB* with *Python 3.13*. 70 | - fix duckdb adapter for *DuckDB 1.1*. 71 | 72 | 12.0 on 2024-09-07 73 | ------------------ 74 | 75 | - add official support for MS SQL Server with `pymssql`. 76 | - pass misc parameters to cursor in generic adapter. 77 | - further improve typing to please pyright. 78 | - minor doc fixes… 79 | - improve one error message. 80 | - reduce verbosity when overriding an adapter. 81 | - refactor tests, simplifying the structure and adding over 50 tests. 82 | in particular, schema creation now relies on *aiosql* features 83 | instead of using driver capabilities directly. 84 | 85 | 11.1 on 2024-08-20 86 | ------------------ 87 | 88 | - improve documentation. 89 | - upgrade sphinx and corresponding read-the-doc theme. 90 | 91 | 11.0 on 2024-08-17 92 | ------------------ 93 | 94 | - update and improve documentation. 95 | - do not allow to override existing queries, as it can lead to hard to 96 | understand bugs. 97 | - use ``pytest.fail`` instead of ``assert False`` in tests. 98 | 99 | 10.4 on 2024-08-08 100 | ------------------ 101 | 102 | - add history of version changes in the documentation (this file!). 103 | - improve comments and doc strings. 
104 | 105 | 10.3 on 2024-08-03 106 | ------------------ 107 | 108 | - add *Python 3.13* and *PyPy 3.10* 109 | 110 | 10.2 on 2024-05-29 111 | ------------------ 112 | 113 | - exclude SQL hints from pattern matching on C comments. 114 | - improve tests about SQL comments. 115 | 116 | 10.1 on 2024-03-06 117 | ------------------ 118 | 119 | - drop ``black`` and ``flake8`` checks, add ``ruff`` instead. 120 | - upgrade doc build GitHub CI version. 121 | 122 | 10.0 on 2024-03-02 123 | ------------------ 124 | 125 | - add ``:object.attribute`` support to reference object attributes in queries. 126 | - add tests about these with dataclasses. 127 | 128 | 9.5 on 2024-02-18 129 | ----------------- 130 | 131 | - add ``duckdb`` support for *Python 3.12* CI. 132 | 133 | 9.4 on 2024-01-28 134 | ----------------- 135 | 136 | - upgrade non regression tests CI version. 137 | - improve coverage test report. 138 | - add doc strings to more methods. 139 | - add ``kwargs*only`` option to fail on simple args. 140 | - add relevant tests about previous item. 141 | - move various utils in ``Queries``. 142 | - add more or improve static typing hints. 143 | - minor style changes. 144 | 145 | 9.3 on 2024-01-18 146 | ----------------- 147 | 148 | - add pyright check. 149 | - improve generic adapter. 150 | - improve static typing. 151 | 152 | 9.2 on 2023-12-24 153 | ----------------- 154 | 155 | - improve some tests. 156 | - minor improvements for async adapters. 157 | 158 | 9.1 on 2023-12-06 159 | ----------------- 160 | 161 | - add *Python 3.12* to GitHub CI. 162 | - get release number from package meta data. 163 | - update doc relating to ```__ is code. 5 | Write it, version control it, comment it, and run it using files. 6 | Writing your SQL code in Python programs as strings doesn't allow you to easily 7 | reuse them in SQL GUIs or CLI tools like ``psql``. 8 | With aiosql you can organize your SQL statements in *.sql* files, load them 9 | into your python application as methods to call without losing the ability to 10 | use them as you would any other SQL file. 11 | 12 | This project supports standard 13 | `PEP 249 `__ 14 | and 15 | `asyncio `__ 16 | based drivers for 17 | `SQLite `__ 18 | (`sqlite3 `__, 19 | `aiosqlite `__, 20 | `apsw `__), 21 | `PostgreSQL `__ 22 | (`psycopg (3) `__, 23 | `apsycopg (3) `__, 24 | `psycopg2 `__, 25 | `pg8000 `__, 26 | `pygresql `__, 27 | `asyncpg `__), 28 | `MySQL `__ 29 | (`PyMySQL `__, 30 | `mysqlclient `__, 31 | `mysql-connector `__, 32 | `asyncmy `__ with 33 | `this adapter `__), 34 | `MariaDB `__ 35 | (`mariadb `__), 36 | `DuckDB `__ 37 | (`duckdb `__) and 38 | `MS SQL Server `__ 39 | (`pymssql `__), 40 | However, some detailed feature support may vary depending on the underlying driver 41 | and database engine actual capabilities. 42 | 43 | Other SQL database drivers which support the ``pyformat`` or ``named`` 44 | `PEP 249 `__ paramstyles should work as well 45 | by just passing the driver as a parameter when building queries. Thus 46 | `Oracle Database `__ 47 | (`oracledb `__) or 48 | `Snowflake `__ 49 | (`snowflake.connector `__) 50 | should work out of the box… 51 | Please report with an issue if it actually works for you! 52 | Otherwise, extensions to support other database drivers can be written by you! 53 | See: `Database Driver Adapters <./database-driver-adapters.html>`__. 54 | Feel free to pull request! 55 | 56 | This module is an implementation of 57 | `Kris Jenkins' yesql `__ 58 | `Clojure `__ library to the 59 | `Python `__ 60 | `ecosystem `__. 
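
As noted above, a PEP 249 driver module which advertises a ``pyformat`` or ``named``
``paramstyle`` can also be passed directly in place of a driver name string.
Here is a minimal sketch with `oracledb`; the query file name and connection
parameters are placeholders:

.. code:: python

    import aiosql
    import oracledb  # a PEP 249 driver, paramstyle is "named"

    # pass the driver module itself instead of a built-in adapter name
    queries = aiosql.from_path("queries.sql", oracledb)

    conn = oracledb.connect(user="scott", password="tiger", dsn="localhost/freepdb1")
    # then call your queries as usual, e.g. queries.some_query(conn, ...)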
61 | 62 | Badges 63 | ------ 64 | 65 | .. 66 | NOTE :target: is needed so that github renders badges on a line. 67 | .. image:: https://github.com/nackjicholson/aiosql/actions/workflows/aiosql-package.yml/badge.svg?branch=main&style=flat 68 | :alt: Build status 69 | :target: https://github.com/nackjicholson/aiosql/actions/ 70 | .. 71 | NOTE hardcoded, this is maintained manually. 72 | .. image:: https://img.shields.io/badge/coverage-100%25-success 73 | :alt: Code Coverage 74 | :target: https://github.com/nackjicholson/aiosql/actions/ 75 | .. 76 | NOTE all tests 77 | # MISC 78 | loading: 17 79 | patterns: 5 80 | # SYNC 81 | sqlite3: 17 82 | apsw: 16 83 | duckdb: 15 84 | mariadb: 17 85 | pymysql: 16 86 | mysqldb: 15 87 | myco: 16 88 | pymssql: 16 89 | pg8000: 14 90 | psycopg2: 18 91 | psycopg3: 19 92 | pygresql: 15 93 | # ASYNC 94 | aiosqlite: 13 95 | asyncpg: 18 96 | apsycopg3: 14 97 | .. image:: https://img.shields.io/badge/tests-261%20✓-success 98 | :alt: Tests 99 | :target: https://github.com/nackjicholson/aiosql/actions/ 100 | .. image:: https://img.shields.io/github/issues/nackjicholson/aiosql?style=flat 101 | :alt: Issues 102 | :target: https://github.com/nackjicholson/aiosql/issues/ 103 | .. image:: https://img.shields.io/github/contributors/nackjicholson/aiosql 104 | :alt: Contributors 105 | :target: https://github.com/nackjicholson/aiosql/graphs/contributors 106 | .. image:: https://img.shields.io/pypi/dm/aiosql?style=flat 107 | :alt: Pypi Downloads 108 | :target: https://pypistats.org/packages/aiosql 109 | .. image:: https://img.shields.io/github/stars/nackjicholson/aiosql?style=flat&label=Star 110 | :alt: Stars 111 | :target: https://github.com/nackjicholson/aiosql/stargazers 112 | .. image:: https://img.shields.io/pypi/v/aiosql 113 | :alt: Version 114 | :target: https://pypi.org/project/aiosql/ 115 | .. image:: https://img.shields.io/github/languages/code-size/nackjicholson/aiosql?style=flat 116 | :alt: Code Size 117 | :target: https://github.com/nackjicholson/aiosql/ 118 | .. image:: https://img.shields.io/badge/databases-6-informational 119 | :alt: Databases 120 | :target: https://github.com/nackjicholson/aiosql/ 121 | .. image:: https://img.shields.io/badge/drivers-16-informational 122 | :alt: Drivers 123 | :target: https://github.com/nackjicholson/aiosql/ 124 | .. image:: https://img.shields.io/github/languages/count/nackjicholson/aiosql?style=flat 125 | :alt: Language Count 126 | :target: https://en.wikipedia.org/wiki/Programming_language 127 | .. image:: https://img.shields.io/github/languages/top/nackjicholson/aiosql?style=flat 128 | :alt: Top Language 129 | :target: https://en.wikipedia.org/wiki/Python_(programming_language) 130 | .. image:: https://img.shields.io/pypi/pyversions/aiosql?style=flat 131 | :alt: Python Versions 132 | :target: https://www.python.org/ 133 | .. 134 | NOTE some non-sense badge about badges:-) 135 | .. image:: https://img.shields.io/badge/badges-16-informational 136 | :alt: Badges 137 | :target: https://shields.io/ 138 | .. image:: https://img.shields.io/pypi/l/aiosql?style=flat 139 | :alt: BSD 2-Clause License 140 | :target: https://opensource.org/licenses/BSD-2-Clause 141 | 142 | 143 | Usage 144 | ----- 145 | 146 | Install from `pypi `__, for instance by running ``pip install aiosql``. 147 | 148 | Then write parametric SQL queries in a file and execute it from Python methods, 149 | eg this *greetings.sql* file: 150 | 151 | .. 
code:: sql 152 | 
153 | -- name: get_all_greetings()
154 | -- Get all the greetings in the database
155 | select greeting_id, greeting
156 | from greetings
157 | order by 1;
158 | 
159 | -- name: get_user_by_username(username)^
160 | -- Get a user from the database using a named parameter
161 | select user_id, username, name
162 | from users
163 | where username = :username;
164 | 
165 | This example has an imaginary SQLite database with greetings and users.
166 | It prints greetings in various languages to the user and showcases the basic
167 | feature of being able to load queries from a SQL file and call them by name
168 | in python code.
169 | Query parameter declarations (eg ``(username)``) are optional, and enforced
170 | when provided.
171 | 
172 | You can use ``aiosql`` to load the queries in this file for use in your Python
173 | application:
174 | 
175 | .. code:: python
176 | 
177 | import aiosql
178 | import sqlite3
179 | 
180 | queries = aiosql.from_path("greetings.sql", "sqlite3")
181 | 
182 | with sqlite3.connect("greetings.db") as conn:
183 | user = queries.get_user_by_username(conn, username="willvaughn")
184 | # user: (1, "willvaughn", "William")
185 | 
186 | for _, greeting in queries.get_all_greetings(conn):
187 | # scan: (1, "Hi"), (2, "Aloha"), (3, "Hola"), …
188 | print(f"{greeting}, {user[2]}!")
189 | # Hi, William!
190 | # Aloha, William!
191 | # …
192 | 
193 | Or even in an asynchronous way, with the two SQL queries executed
194 | using ``aiosqlite`` and ``asyncio``:
195 | 
196 | .. code:: python
197 | 
198 | import asyncio
199 | import aiosql
200 | import aiosqlite
201 | 
202 | queries = aiosql.from_path("greetings.sql", "aiosqlite")
203 | 
204 | async def main():
205 | async with aiosqlite.connect("greetings.db") as conn:
206 | user = await queries.get_user_by_username(conn, username="willvaughn")
207 | 
208 | async for _, greeting in queries.get_all_greetings(conn):
209 | print(f"{greeting}, {user[2]}!")
210 | 
211 | asyncio.run(main())
212 | 
213 | It may seem inconvenient to provide a connection on each call.
214 | You may have a look at the `AnoDB `__ `DB`
215 | class which wraps both a database connection *and* queries in one
216 | connection-like extended object, including performing automatic reconnection
217 | when needed. The wrapper also allows caching query results.
218 | 
219 | Why you might want to use this
220 | ------------------------------
221 | 
222 | * You think SQL is pretty good, and writing SQL is an important part of your applications.
223 | * You don't want to write your SQL in strings intermixed with your python code.
224 | * You're not using an ORM like `SQLAlchemy `__ or
225 | `Django `__,
226 | with large (100k lines) code footprints vs under 1000 for `aiosql` and about 300 for `anodb`,
227 | and you don't need to or don't want to write SQL-like code with a Python syntax.
228 | * You want to be able to reuse your SQL in other contexts,
229 | eg loading it into `psql` or other database tools.
230 | 
231 | 
232 | Why you might NOT want to use this
233 | ----------------------------------
234 | 
235 | * You're looking for an `ORM `__.
236 | * You aren't comfortable writing SQL code.
237 | * You don't have anything in your application that requires complicated SQL beyond basic CRUD operations.
238 | * Dynamically loaded objects built at runtime really bother you.
239 | -------------------------------------------------------------------------------- /aiosql/query_loader.py: -------------------------------------------------------------------------------- 1 | import re 2 | import inspect 3 | from pathlib import Path 4 | from typing import Type, Sequence, Any 5 | 6 | from .utils import SQLParseException, SQLLoadException, VAR_REF, VAR_REF_DOT, log 7 | from .types import QueryDatum, QueryDataTree, SQLOperationType, DriverAdapterProtocol 8 | 9 | # identifies name definition comments 10 | _QUERY_DEF = re.compile(r"--\s*name\s*:\s*") 11 | 12 | # identifies record class definition comments 13 | _RECORD_DEF = re.compile(r"--\s*record_class\s*:\s*(\w+)\s*") 14 | 15 | # extract a valid query name followed by an optional operation spec 16 | # FIXME this accepts "1st" but seems to reject "é" 17 | _NAME_OP = re.compile( 18 | # query name 19 | r"^(?P\w+)" 20 | # optional list of parameters (foo, bla) or () 21 | r"(|\((?P(\s*|\s*\w+\s*(,\s*\w+\s*)*))\))" 22 | # operation, empty for simple select 23 | r"(?P(|\^|\$|!|\'(\'\'|[^\'])*\')|" 47 | # double quote strings 48 | r'(?P"(""|[^"])+")|' 49 | # one-line comment 50 | r"(?P--.*?$)|" 51 | # multiline comments, excluding SQL hints 52 | r"|(?P/\*(?!\+[\s\S]*?\*/)[\s\S]*?\*/)", 53 | re.DOTALL | re.MULTILINE, 54 | ) 55 | 56 | 57 | def _remove_ml_comments(code: str) -> str: 58 | """Remove /* ... */ comments from code""" 59 | # identify commented regions to be removed 60 | rm = [] 61 | for m in _UNCOMMENT.finditer(code): 62 | ml = m.groupdict()["multiline"] 63 | if ml: 64 | rm.append(m.span()) 65 | # keep whatever else 66 | ncode, current = "", 0 67 | for start, end in rm: 68 | ncode += code[current:start] 69 | current = end 70 | # get tail 71 | ncode += code[current:] 72 | return ncode 73 | 74 | 75 | def _preprocess_object_attributes(attribute, sql): 76 | """Substitute o.a by oa and keep track of variables.""" 77 | 78 | attributes = {} 79 | 80 | def _replace(m): 81 | gd = m.groupdict() 82 | if gd["dquote"] is not None: 83 | return gd["dquote"] 84 | elif gd["squote"] is not None: 85 | return gd["squote"] 86 | else: 87 | var, att = gd["var_name"].split(".", 1) 88 | var_name = var + attribute + att 89 | if var not in attributes: 90 | attributes[var] = {} 91 | if att not in attributes[var]: 92 | attributes[var][att] = var_name 93 | return f"{gd['lead']}:{var_name}" 94 | 95 | sql = VAR_REF_DOT.sub(_replace, sql) 96 | 97 | return sql, attributes 98 | 99 | 100 | class QueryLoader: 101 | """Load Queries. 102 | 103 | This class holds the various utilities to read SQL files and build 104 | QueryDatum, which will be transformed as functions in Queries. 105 | 106 | - :param driver_adapter: driver name or class. 107 | - :param record_classes: nothing of dict. 108 | - :param attribute: string to insert in place of ``.``. 109 | """ 110 | 111 | def __init__( 112 | self, 113 | driver_adapter: DriverAdapterProtocol, 114 | record_classes: dict[str, Any]|None, 115 | attribute: str|None = None, 116 | ): 117 | self.driver_adapter = driver_adapter 118 | self.record_classes = record_classes if record_classes is not None else {} 119 | self.attribute = attribute 120 | 121 | def _make_query_datum( 122 | self, 123 | query: str, 124 | ns_parts: list[str], 125 | floc: tuple[Path|str, int], 126 | ) -> QueryDatum: 127 | """Build a query datum. 128 | 129 | - :param query: the spec and name (``query-name!\n-- comments\nSQL;\n``) 130 | - :param ns_parts: name space parts, i.e. 
subdirectories of loaded files 131 | - :param floc: file name and lineno the query was extracted from 132 | """ 133 | lines = [line.rstrip() for line in query.strip().splitlines()] 134 | qname, qop, qsig = self._get_name_op(lines[0]) 135 | if re.search(r"[^A-Za-z0-9_]", qname): 136 | log.warning(f"non ASCII character in query name: {qname}") 137 | if len(lines) <= 1: 138 | raise SQLParseException(f"empty query for: {qname} at {floc[0]}:{floc[1]}") 139 | record_class = self._get_record_class(lines[1]) 140 | sql, doc = self._get_sql_doc(lines[2 if record_class else 1 :]) 141 | if re.search("(?s)^[\t\n\r ;]*$", sql): 142 | raise SQLParseException(f"empty sql for: {qname} at {floc[0]}:{floc[1]}") 143 | signature = self._build_signature(sql, qname, qsig) 144 | query_fqn = ".".join(ns_parts + [qname]) 145 | if self.attribute: # :u.a -> :u__a, **after** signature generation 146 | sql, attributes = _preprocess_object_attributes(self.attribute, sql) 147 | else: # pragma: no cover 148 | attributes = None 149 | sql = self.driver_adapter.process_sql(query_fqn, qop, sql) 150 | return QueryDatum(query_fqn, doc, qop, sql, record_class, signature, floc, attributes, qsig) 151 | 152 | def _get_name_op(self, text: str) -> tuple[str, SQLOperationType, list[str]|None]: 153 | """Extract name, parameters and operation from spec.""" 154 | qname_spec = text.replace("-", "_") 155 | matched = _NAME_OP.match(qname_spec) 156 | if not matched or _BAD_PREFIX.match(qname_spec): 157 | raise SQLParseException(f'invalid query name and operation spec: "{qname_spec}"') 158 | nameop = matched.groupdict() 159 | params, rawparams = None, nameop["params"] 160 | if rawparams is not None: 161 | params = [p.strip() for p in rawparams.split(",")] 162 | if params == ['']: # handle "( )" 163 | params = [] 164 | operation = _OP_TYPES[nameop["op"]] 165 | if params and operation == "#": # pragma: no cover # FIXME it is covered? 166 | raise SQLParseException(f'cannot use named parameters in SQL script: "{qname_spec}"') 167 | return nameop["name"], operation, params 168 | 169 | def _get_record_class(self, text: str) -> Type|None: 170 | """Extract record class from spec.""" 171 | rc_match = _RECORD_DEF.match(text) 172 | rc_name = rc_match.group(1) if rc_match else None 173 | # TODO: Probably will want this to be a class, marshal in, and marshal out 174 | return self.record_classes.get(rc_name) if isinstance(rc_name, str) else None 175 | 176 | def _get_sql_doc(self, lines: Sequence[str]) -> tuple[str, str]: 177 | """Separate SQL-comment documentation and SQL code.""" 178 | doc, sql = "", "" 179 | for line in lines: 180 | doc_match = _SQL_COMMENT.match(line) 181 | if doc_match: 182 | doc += doc_match.group(1) + "\n" 183 | else: 184 | sql += line + "\n" 185 | 186 | return sql.strip(), doc.rstrip() 187 | 188 | def _build_signature(self, sql: str, qname: str, sig: list[str]|None) -> inspect.Signature: 189 | """Return signature object for generated dynamic function.""" 190 | # FIXME what about the connection?! 
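        # Scan ":var" references in the SQL, skipping quoted strings, digits and duplicates;
        # each remaining name becomes a keyword-only parameter of the generated method, and
        # declared parameters (if any) are cross-checked for undeclared or unused names.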
191 | params = [inspect.Parameter("self", inspect.Parameter.POSITIONAL_OR_KEYWORD)] 192 | names = set() 193 | for match in VAR_REF.finditer(sql): 194 | gd = match.groupdict() 195 | if gd["squote"] or gd["dquote"]: 196 | continue 197 | name = gd["var_name"] 198 | if name.isdigit() or name in names: 199 | continue 200 | if sig is not None: # optional parameter declarations 201 | if name not in sig: 202 | raise SQLParseException(f"undeclared parameter name in query {qname}: {name}") 203 | names.add(name) 204 | params.append( 205 | inspect.Parameter( 206 | name=name, 207 | kind=inspect.Parameter.KEYWORD_ONLY, 208 | ) 209 | ) 210 | if sig is not None and len(sig) != len(names): 211 | unused = sorted(n for n in sig if n not in names) 212 | raise SQLParseException(f"unused declared parameter in query {qname}: {unused}") 213 | return inspect.Signature(parameters=params) 214 | 215 | def load_query_data_from_sql( 216 | self, sql: str, ns_parts: list[str], fname: Path|str = "" 217 | ) -> list[QueryDatum]: 218 | """Load queries from a string.""" 219 | usql = _remove_ml_comments(sql) 220 | qdefs = _QUERY_DEF.split(usql) 221 | # FIXME lineno is from the uncommented file 222 | lineno = 1 + qdefs[0].count("\n") 223 | data = [] 224 | # first item is anything before the first query definition, drop it! 225 | for qdef in qdefs[1:]: 226 | data.append(self._make_query_datum(qdef, ns_parts, (fname, lineno))) 227 | lineno += qdef.count("\n") 228 | return data 229 | 230 | def load_query_data_from_file( 231 | self, path: Path, ns_parts: list[str] = [], encoding=None 232 | ) -> list[QueryDatum]: 233 | """Load queries from a file.""" 234 | return self.load_query_data_from_sql(path.read_text(encoding=encoding), ns_parts, path) 235 | 236 | def load_query_data_from_dir_path( 237 | self, dir_path, ext=(".sql",), encoding=None 238 | ) -> QueryDataTree: 239 | """Load queries from a directory.""" 240 | if not dir_path.is_dir(): 241 | raise ValueError(f"The path {dir_path} must be a directory") 242 | 243 | def _recurse_load_query_data_tree(path, ns_parts=[], ext=(".sql",), encoding=None): 244 | query_data_tree = {} 245 | for p in path.iterdir(): 246 | if p.is_file(): 247 | if p.suffix not in ext: 248 | continue 249 | for query_datum in self.load_query_data_from_file( 250 | p, ns_parts, encoding=encoding 251 | ): 252 | query_data_tree[query_datum.query_name] = query_datum 253 | elif p.is_dir(): 254 | query_data_tree[p.name] = _recurse_load_query_data_tree( 255 | p, ns_parts + [p.name], ext=ext, encoding=encoding 256 | ) 257 | else: # pragma: no cover 258 | # This should be practically unreachable. 259 | raise SQLLoadException(f"The path must be a directory or file, got {p}") 260 | return query_data_tree 261 | 262 | return _recurse_load_query_data_tree(dir_path, ext=ext, encoding=encoding) 263 | -------------------------------------------------------------------------------- /docs/source/getting-started.rst: -------------------------------------------------------------------------------- 1 | Getting Started 2 | =============== 3 | 4 | Philosophy 5 | ---------- 6 | 7 | The aiosql project is for writing SQL to interact with a database. 8 | Most database libraries are intended to reduce the amount of SQL developers need to write, 9 | aiosql takes an alternative approach. 10 | Why? 11 | 12 | - Alternatives are good. No approach fits all use cases, no matter how predominant. 13 | - SQL is the most expressive and performant way to interact with a SQL database. 
14 | - Investigating where a query came from is simpler when it is source controlled, named, and written by a human.
15 | - Writing SQL in files gives you built-in compatibility with powerful SQL tools like
16 | `DataGrip `__ and
17 | `psql `__.
18 | 
19 | About ORMs
20 | ~~~~~~~~~~
21 | 
22 | ..
23 | SQLAlchemy 2.0.32: 129582 locs
24 | Django 5.1 + 2 deps: 129682 locs (python & js)
25 | 
26 | ORMs and SQL Query Builders offer object interfaces to generate and execute SQL.
27 | They exist to ease development, not to make it simpler.
28 | They have a large code base to provide their services, and many complementary packages
29 | to provide more advanced features, resulting in over a *hundred thousand* lines of code.
30 | Inheriting object hierarchies, mixing data with behaviors, mirroring a database schema, and generating SQL are not simple.
31 | ORMs are introduced early in a project's life when requirements are limited and the need to move fast is paramount.
32 | As a project grows, ORM objects and their relations grow too, and they become a source of complexity and coupling.
33 | 
34 | ``aiosql`` doesn't solve these problems directly either, and your application will still get more complex with time.
35 | You can write bad SQL and bad python.
36 | But with aiosql there is no mandate that all interaction with the database go
37 | through a complex network of related python objects that mirror a database schema.
38 | The only mandates are that you write SQL to talk to the database and python to use the data.
39 | From there you start with a system in which the database and the application are intentionally
40 | separate and independent from each other so they can change independently.
41 | The architecture of your application and the boundaries you choose between it and the database are left to you.
42 | 
43 | The documentation for projects like `SQLAlchemy `__ and
44 | `Django DB `__ can give you a better vision
45 | for the class of problems that ORMs do solve and the productivity gains they aim for.
46 | Please choose these projects over ``aiosql`` if you find that they fit the needs of your application better.
47 | 
48 | Loading Queries
49 | ---------------
50 | 
51 | This section goes over the three ways to make SQL queries available for execution in python.
52 | You'll learn the basics of defining queries so aiosql can find them and turn them into methods
53 | on a ``Queries`` object.
54 | For more details reference the :doc:`defining-sql-queries` documentation.
55 | 
56 | From a SQL File
57 | ~~~~~~~~~~~~~~~
58 | 
59 | SQL can be loaded by providing a path to a ``.sql`` file.
60 | Below is a *blogs.sql* file that defines two queries.
61 | 
62 | .. code:: sql
63 | 
64 | -- name: get_all_blogs
65 | select blogid,
66 | userid,
67 | title,
68 | content,
69 | published
70 | from blogs;
71 | 
72 | -- name: get_user_blogs
73 | -- Get blogs with a fancy formatted published date and author field
74 | select b.blogid,
75 | b.title,
76 | strftime('%Y-%m-%d %H:%M', b.published) as published,
77 | u.username as author
78 | from blogs b
79 | inner join users u on b.userid = u.userid
80 | where u.username = :username
81 | order by b.published desc;
82 | 
83 | Notice the ``-- name: `` comments and the ``:username`` substitution variable.
84 | The comments that start with ``-- name:`` are the magic of aiosql.
85 | They are used by ```aiosql.from_path`` <./api.md#aiosqlfrom_path>`__ to parse the file
86 | into separate methods accessible by the name.
87 | The ``aiosql.from_path`` function takes a path to a sql file or directory 88 | and the name of the database driver intended for use with the methods. 89 | 90 | .. code:: python 91 | 92 | queries = aiosql.from_path("blogs.sql", "sqlite3") 93 | 94 | In the case of *blogs.sql* we expect the following two methods to be available. 95 | The ``username`` parameter of ``get_user_blogs`` will substitute in for the ``:username`` variable in the SQL. 96 | Standard ``SELECT`` statements return a generator, i.e. something which can be iterated upon, 97 | for instance with a ``for`` loop. 98 | Just cast the result to ``list`` to get an actual list. 99 | The generator returns what the underlying driver returns, usually tuples. 100 | 101 | .. code:: python 102 | 103 | def get_all_blogs(self) -> Generator[Any]: 104 | pass 105 | 106 | def get_user_blogs(self, username: str) -> Generator[Any]: 107 | pass 108 | 109 | From an SQL String 110 | ~~~~~~~~~~~~~~~~~~ 111 | 112 | SQL can be loaded from a string as well. 113 | The result below is the same as the first example above that loads from a SQL file. 114 | 115 | .. code:: python 116 | 117 | sql_str = """ 118 | -- name: get_all_blogs 119 | select blogid, 120 | userid, 121 | title, 122 | content, 123 | published 124 | from blogs; 125 | 126 | -- name: get_user_blogs 127 | -- Get blogs with a fancy formatted published date and author field 128 | select b.blogid, 129 | b.title, 130 | strftime('%Y-%m-%d %H:%M', b.published) as published, 131 | u.username as author 132 | from blogs b 133 | inner join users u on b.userid = u.userid 134 | where u.username = :username 135 | order by b.published desc; 136 | """ 137 | 138 | queries = aiosql.from_str(sql_str, "sqlite3") 139 | 140 | The ``Queries`` object here will have two methods: 141 | 142 | .. code:: python 143 | 144 | queries.get_all_blogs(conn) 145 | queries.get_user_blogs(conn, username="johndoe") 146 | 147 | From a Directory of SQL Files 148 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 149 | 150 | Loading a directory of SQL files loads all of the queries defined in those files into a single object. 151 | The ``example/sql`` directory below contains three ``.sql`` files and can be loaded using 152 | ``aiosql.from_path`` `<./api.md#aiosqlfrom_path>`__. 153 | 154 | :: 155 | 156 | example/sql 157 | ├── blogs.sql 158 | ├── create_schema.sql 159 | └── users.sql 160 | 161 | .. code:: python 162 | 163 | queries = aiosql.from_path("example/sql", "sqlite3") 164 | 165 | The resulting ``queries`` object will have a mixture of methods from all the files. 166 | 167 | Subdirectories 168 | ^^^^^^^^^^^^^^ 169 | 170 | Introducing subdirectories allows namespacing queries. 171 | This provides a way to further organize and group queries conceptually. 172 | For instance, you could define blog queries separate from user queries access them on distinct 173 | properties of the queries object. 174 | 175 | Assume the *blogs.sql* and *users.sql* files both contain a ``-- name: get_all`` query. 176 | 177 | :: 178 | 179 | example/sql/ 180 | ├── blogs/ 181 | │   └── blogs.sql 182 | ├── create_schema.sql 183 | └── users/ 184 | └── users.sql 185 | 186 | .. code:: python 187 | 188 | queries = aiosql.from_path("example/sql", "sqlite3") 189 | 190 | The ``Queries`` object has two nested ``get_all`` methods accessible on attributes ``.blogs`` and ``.users``. 191 | The attributes reflect the names of the subdirectories. 192 | 193 | .. 
code:: python 194 | 195 | queries.blogs.get_all(conn) 196 | queries.users.get_all(conn) 197 | 198 | Calling Query Methods 199 | --------------------- 200 | 201 | Connections 202 | ~~~~~~~~~~~ 203 | 204 | The connection or ``conn`` is always the first argument to an ``aiosql`` method. 205 | The ``conn`` is an open connection to a database driver that your aiosql method can use for executing the sql it contains. 206 | Controlling connections outside of aiosql queries means you can call multiple queries and control them under one transaction, 207 | or otherwise set connection level properties that affect driver behavior. 208 | 209 | .. note:: 210 | 211 | For more see: :doc:`advanced-topics`. 212 | 213 | In the examples throughout this page a ``conn`` object has been passed. 214 | Here is a more code complete example that shows the connection creation and call to 215 | ``aiosql.from_path`` `<./api.md#aiosqlfrom_path>`__ that make a queries object. 216 | 217 | .. code:: pycon 218 | 219 | >>> import sqlite3 220 | >>> import aiosql 221 | >>> conn = sqlite3.connect("./blogs.db") 222 | >>> # Note the "sqlite3" driver_adapter argument is what tells 223 | >>> # aiosql it should be expecting a sqlite3 connection object. 224 | >>> queries = aiosql.from_path("./blogs.sql", "sqlite3") 225 | >>> queries.get_all_blogs(conn) 226 | [(1, 227 | 1, 228 | 'What I did Today', 229 | 'I mowed the lawn, washed some clothes, and ate a burger.\n' 230 | '\n' 231 | 'Until next time,\n' 232 | 'Bob', 233 | '2017-07-28'), 234 | (2, 3, 'Testing', 'Is this thing on?\n', '2018-01-01'), 235 | (3, 236 | 1, 237 | 'How to make a pie.', 238 | '1. Make crust\n2. Fill\n3. Bake\n4. Eat\n', 239 | '2018-11-23')] 240 | 241 | See the associated `AnoDB Project `__ for embedding both a connection pool and queries. 242 | 243 | Passing Parameters 244 | ~~~~~~~~~~~~~~~~~~ 245 | 246 | .. code:: sql 247 | 248 | -- name: get_user_blogs 249 | -- Get blogs with a fancy formatted published date and author field 250 | select b.blogid, 251 | b.title, 252 | strftime('%Y-%m-%d %H:%M', b.published) as published, 253 | u.username as author 254 | from blogs b 255 | inner join users u on b.userid = u.userid 256 | where u.username = :username 257 | order by b.published desc; 258 | 259 | ``aiosql`` allows parameterization of queries by parsing values like ``:username`` 260 | in the above query and having the resultant method expect an inbound argument to 261 | substitute for ``:username``. 262 | 263 | You can call the ``get_user_blogs`` function with plain arguments or keyword arguments with the 264 | name of the subsitution variable. 265 | 266 | .. code:: python 267 | 268 | >>> import sqlite3 269 | >>> import aiosql 270 | >>> conn = sqlite3.connect("./blogs.db") 271 | >>> queries = aiosql.from_path("./blogs.sql", "sqlite3") 272 | >>> 273 | >>> # Using keyword args 274 | >>> queries.get_user_blogs(conn, username="bobsmith") 275 | [(3, 'How to make a pie.', '2018-11-23 00:00', 'bobsmith'), (1, 'What I did Today', '2017-07-28 00:00', 'bobsmith')] 276 | >>> 277 | >>> # Using positional argument 278 | >>> queries.get_user_blogs(conn, "janedoe") 279 | [(2, 'Testing', '2018-01-01 00:00', 'janedoe')] 280 | 281 | .. warning:: 282 | 283 | When passing positional arguments aiosql will apply them in the order that the substitutions appear in your SQL. 284 | This can be convenient and clear in some cases, but very confusing in others. 285 | You might want to choose to always name your arguments for clarity. 
286 | Consider enforcing this behavior by passing ``kwargs_only=True`` when creating the queries. 287 | 288 | It is also possible to access simple object attributes in a query, with the dot syntax: 289 | 290 | .. code:: sql 291 | 292 | -- name: add_user 293 | insert into users(username, name) 294 | values (:u.username, :u.name); 295 | 296 | Then simple pass your object as ``u``: 297 | 298 | .. code:: python 299 | 300 | # User is some class with attributes username and name 301 | calvin = User("calvin", "Calvin") 302 | queries.add_user(u=calvin) 303 | -------------------------------------------------------------------------------- /tests/test_loading.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import re 3 | import inspect 4 | from pathlib import Path 5 | from unittest import mock 6 | 7 | import aiosql 8 | from aiosql import SQLParseException, SQLLoadException 9 | from aiosql.queries import Queries 10 | from aiosql.query_loader import QueryLoader 11 | 12 | import pytest 13 | 14 | pytestmark = [ 15 | pytest.mark.misc, 16 | ] 17 | 18 | 19 | @pytest.fixture 20 | def sql_dir(): 21 | return Path(__file__).parent / "blogdb/sql" 22 | 23 | 24 | @pytest.fixture 25 | def sql_file(sql_dir): 26 | return sql_dir / "blogs/blogs.sql" 27 | 28 | 29 | @pytest.fixture 30 | def sql(sql_file): 31 | with open(sql_file) as f: 32 | return f.read() 33 | 34 | 35 | def test_version(): 36 | assert re.match(r"\d+\.\d+(\.?dev\d*)?$", aiosql.__version__) 37 | 38 | 39 | def test_frompath_queries_cls(sql_dir): 40 | class TestQueries(Queries): 41 | pass 42 | 43 | queries = aiosql.from_path(sql_dir, "aiosqlite", queries_cls=TestQueries, kwargs_only=False) 44 | assert isinstance(queries, TestQueries) 45 | 46 | assert repr(queries).startswith("Queries(") 47 | 48 | 49 | def test_frompath_queryloader_cls(sql_dir): 50 | mock_loader = mock.MagicMock(wraps=QueryLoader) 51 | 52 | aiosql.from_path(sql_dir, "aiosqlite", loader_cls=mock_loader, kwargs_only=False) 53 | 54 | assert mock_loader.called 55 | 56 | 57 | def test_fromstr_queries_cls(sql): 58 | class TestQueries(Queries): 59 | pass 60 | 61 | queries = aiosql.from_str(sql, "aiosqlite", queries_cls=TestQueries) 62 | assert isinstance(queries, TestQueries) 63 | 64 | 65 | def test_fromstr_queryloader_cls(sql): 66 | mock_loader = mock.MagicMock(wraps=QueryLoader) 67 | 68 | aiosql.from_str(sql, "aiosqlite", loader_cls=mock_loader) 69 | 70 | assert mock_loader.called 71 | 72 | 73 | def test_trailing_space_on_lines_does_not_error(): 74 | # There is whitespace in this string after the line ends 75 | sql_str = "-- name: trailing-space^ \n" 76 | sql_str += "select * from test; \n" 77 | 78 | try: 79 | aiosql.from_str(sql_str, "aiosqlite") 80 | except SQLParseException: # pragma: no cover 81 | pytest.fail("Raised SQLParseException due to trailing space in query.") 82 | 83 | 84 | def test_non_ascii_char(): 85 | # this triggers a warning, that we do not really check but for coverage 86 | q = aiosql.from_str("-- name: zéro\nSELECT 0;\n", "sqlite3") 87 | assert "zéro" in q._available_queries 88 | q = aiosql.from_str("-- name: éêèëÉÊÈË\nSELECT 'eeeeeeee!';\n", "sqlite3") 89 | assert "éêèëÉÊÈË" in q._available_queries 90 | q = aiosql.from_str("-- name: 안녕하세요\nSELECT 'hello!';\n", "sqlite3") 91 | assert "안녕하세요" in q._available_queries 92 | 93 | 94 | def test_loading_query_signature(): 95 | sql_str = "-- name: get^\n" "select * from test where foo=:foo and bar=:bar" 96 | queries = aiosql.from_str(sql_str, "aiosqlite") 97 | assert 
queries.get.__signature__ == inspect.Signature( 98 | [ 99 | inspect.Parameter("self", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD), 100 | inspect.Parameter("foo", kind=inspect.Parameter.KEYWORD_ONLY), 101 | inspect.Parameter("bar", kind=inspect.Parameter.KEYWORD_ONLY), 102 | ] 103 | ) 104 | 105 | 106 | def test_names(): 107 | try: 108 | queries = aiosql.from_str("-- name: 1st\nSELECT 1;\n", "sqlite3") 109 | pytest.fail("'1st' should be rejected") 110 | except SQLParseException as e: 111 | assert '"1st"' in str(e) 112 | try: 113 | queries = aiosql.from_str("-- name: one$garbage\nSELECT 1;\n", "sqlite3") 114 | pytest.fail("garbage should be rejected") 115 | except SQLParseException as e: 116 | assert 'garbage"' in str(e) 117 | try: 118 | queries = aiosql.from_str("-- name: foo-bla\nSELECT 1;\n" * 2, "sqlite3") 119 | pytest.fail("must reject homonymous queries") 120 | except SQLLoadException as e: 121 | assert "foo_bla" in str(e) 122 | # - is okay because mapped to _ 123 | queries = aiosql.from_str("-- name: -dash\nSELECT 1;\n", "sqlite3") 124 | assert "_dash" in queries.available_queries 125 | 126 | 127 | def test_loading_query_signature_with_duplicate_parameter(): 128 | sql_str = "-- name: get^\n" "select * from test where foo=:foo and foo=:foo" 129 | queries = aiosql.from_str(sql_str, "aiosqlite") 130 | assert queries.get.__signature__ == inspect.Signature( 131 | [ 132 | inspect.Parameter("self", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD), 133 | inspect.Parameter("foo", kind=inspect.Parameter.KEYWORD_ONLY), 134 | ] 135 | ) 136 | 137 | 138 | def test_adapters(): 139 | try: 140 | aiosql.aiosql._make_driver_adapter("no-such-driver-adapter") 141 | pytest.fail("must raise an exception") # pragma: no cover 142 | except ValueError as e: 143 | assert "unregistered driver_adapter" in str(e) 144 | 145 | class PyFormatConnector: 146 | paramstyle = "pyformat" 147 | 148 | a = aiosql.aiosql._make_driver_adapter(PyFormatConnector) 149 | assert type(a) == aiosql.adapters.PyFormatAdapter 150 | 151 | class NamedConnector: 152 | paramstyle = "named" 153 | 154 | a = aiosql.aiosql._make_driver_adapter(NamedConnector) 155 | assert type(a) == aiosql.adapters.GenericAdapter 156 | 157 | class NoSuchConnector: 158 | paramstyle = "no-such-style" 159 | 160 | try: 161 | aiosql.aiosql._make_driver_adapter(NoSuchConnector) 162 | pytest.fail("must raise an exception") # pragma: no cover 163 | except ValueError as e: 164 | assert "Unexpected driver" in str(e) 165 | 166 | try: 167 | aiosql.aiosql._make_driver_adapter(True) # type: ignore 168 | pytest.fail("must raise an exception") # pragma: no cover 169 | except ValueError as e: 170 | assert "Unexpected driver" in str(e) 171 | 172 | 173 | def test_no_such_path(): 174 | try: 175 | aiosql.from_path("/no/such/file", "sqlite3") 176 | pytest.fail("must raise an exception") # pragma: no cover 177 | except SQLLoadException as e: 178 | assert "File does not exist" in str(e) 179 | 180 | 181 | def test_file_loading(sql_file): 182 | db = aiosql.from_path(sql_file, "sqlite3") 183 | assert "get_user_blogs" in db.__dict__ 184 | 185 | 186 | def test_misc(sql_file): 187 | try: 188 | queries = aiosql.queries.Queries("sqlite3") 189 | queries._make_sync_fn(("hello", None, -1, "SELECT NULL;", None, None, None, None, None)) 190 | pytest.fail("must raise an exception") # pragma: no cover 191 | except ValueError as e: 192 | assert "Unknown operation" in str(e) 193 | try: 194 | db = aiosql.from_str("-- name: a*b\nSELECT 'ab'\n", "sqlite3") 195 | pytest.fail("must raise en exception") # 
pragma: no cover 196 | except Exception as e: 197 | assert "invalid query name and operation" in str(e) 198 | ql = aiosql.query_loader.QueryLoader(None, None) 199 | try: 200 | ql.load_query_data_from_dir_path(sql_file) 201 | pytest.fail("must raise en exception") # pragma: no cover 202 | except ValueError as e: 203 | assert "must be a directory" in str(e) 204 | 205 | 206 | def test_kwargs(): 207 | # kwargs_only == True 208 | queries = aiosql.from_str("-- name: plus_one$\nSELECT 1 + :val;\n", "sqlite3", kwargs_only=True) 209 | import sqlite3 210 | 211 | conn = sqlite3.connect(":memory:") 212 | assert 42 == queries.plus_one(conn, val=41) 213 | try: 214 | queries.plus_one(conn, 2) 215 | pytest.fail("must raise an exception") # pragma: no cover 216 | except ValueError as e: 217 | assert "kwargs" in str(e) 218 | # kwargs_only == False 219 | queries = aiosql.from_str( 220 | "-- name: plus_two$\nSELECT 2 + :val;\n", "sqlite3", kwargs_only=False 221 | ) 222 | assert 42 == queries.plus_two(conn, val=40) 223 | try: 224 | queries.plus_two(conn, 2, val=41) 225 | pytest.fail("must raise an exception") # pragma: no cover 226 | except ValueError as e: 227 | assert "mix" in str(e) 228 | 229 | import sqlite3 230 | 231 | PARAM_QUERIES = """ 232 | -- name: xlii()$ 233 | SELECT 42; 234 | 235 | -- name: next(n)$ 236 | SELECT :n+1; 237 | 238 | -- name: add(n, m)$ 239 | SELECT :n+:m; 240 | 241 | -- name: sub$ 242 | SELECT :n - :m; 243 | """ 244 | 245 | def run_param_queries(conn, kwargs_only: bool = True): 246 | q = aiosql.from_str(PARAM_QUERIES, "sqlite3", kwargs_only=kwargs_only) 247 | assert q.xlii(conn) == 42 248 | assert q.next(conn, n=41) == 42 249 | assert q.add(conn, n=19, m=23) == 42 250 | assert q.sub(conn, n=47, m=5) == 42 251 | # usage errors 252 | try: 253 | q.next(conn, 41) 254 | pytest.fail("must complain about positional parameter") 255 | except ValueError as e: 256 | assert "positional" in str(e) 257 | try: 258 | ft = q.sub(conn, 47, 5) 259 | if kwargs_only: 260 | pytest.fail("must complain about positional parameter") 261 | else: 262 | assert sys.version_info < (3, 14) 263 | assert ft == 42 264 | except sqlite3.ProgrammingError as e: # scheduled deprecation 265 | assert sys.version_info >= (3, 14) 266 | except ValueError as e: 267 | assert "positional" in str(e) 268 | 269 | def test_parameter_declarations(): 270 | # ok 271 | conn = sqlite3.connect(":memory:") 272 | run_param_queries(conn, kwargs_only=True) 273 | run_param_queries(conn, kwargs_only=False) 274 | conn.close() 275 | # errors 276 | try: 277 | aiosql.from_str("-- name: foo()\nSELECT :N + 1;\n", "sqlite3") 278 | pytest.fail("must raise an exception") 279 | except SQLParseException as e: 280 | assert "undeclared" in str(e) and "N" in str(e) 281 | try: 282 | aiosql.from_str("-- name: foo(N, M)\nSELECT :N + 1;\n", "sqlite3") 283 | pytest.fail("must raise an exception") 284 | except SQLParseException as e: 285 | assert "unused" in str(e) and "M" in str(e) 286 | try: 287 | aiosql.from_str("-- name: foo(a)#\nCREATE TABLE :a();\n", "sqlite3") 288 | pytest.fail("must raise an exception") 289 | except SQLParseException as e: 290 | assert "script" in str(e) 291 | 292 | def test_empty_query(): 293 | try: 294 | aiosql.from_str("-- name: foo\n--name: bla\n", "sqlite3") 295 | pytest.fail("must raise an exception") 296 | except SQLParseException as e: 297 | assert "empty query" in str(e) 298 | try: 299 | aiosql.from_str("-- name: foo\n-- record_class: Foo\n--name: bla\n", "sqlite3") 300 | pytest.fail("must raise an exception") 301 | except 
SQLParseException as e: 302 | assert "empty sql" in str(e) 303 | try: 304 | aiosql.from_str("-- name: foo\n \r\n\t --name: bla\n", "sqlite3") 305 | pytest.fail("must raise an exception") 306 | except SQLParseException as e: 307 | assert "empty query" in str(e) 308 | try: 309 | aiosql.from_str("-- name: foo\n-- just a comment\n--name: bla\n", "sqlite3") 310 | pytest.fail("must raise an exception") 311 | except SQLParseException as e: 312 | assert "empty sql" in str(e) 313 | try: 314 | aiosql.from_str("-- name: foo\n-- record_class: Foo\n-- just a comment\n--name: bla\n", "sqlite3") 315 | pytest.fail("must raise an exception") 316 | except SQLParseException as e: 317 | assert "empty sql" in str(e) 318 | try: 319 | aiosql.from_str("-- name: foo\n-- just a comment\n ; \n-- hop\n--name: bla\n", "sqlite3") 320 | pytest.fail("must raise an exception") 321 | except SQLParseException as e: 322 | assert "empty sql" in str(e) 323 | try: 324 | aiosql.from_str("-- name: foo\n-- just a comment\n;\n", "sqlite3") 325 | pytest.fail("must raise an exception") 326 | except SQLParseException as e: 327 | assert "empty sql" in str(e) 328 | try: 329 | aiosql.from_str("-- name: foo\n-- record_class: Foo\n-- just a comment\n;\n", "sqlite3") 330 | pytest.fail("must raise an exception") 331 | except SQLParseException as e: 332 | assert "empty sql" in str(e) 333 | --------------------------------------------------------------------------------