├── test
├── __init__.py
├── conftest.py
├── test_inspect.py
├── test_orm.py
└── test_suite.py
├── .github
├── scripts
│ ├── __init__.py
│ ├── increment_version_test.py
│ └── increment_version.py
└── workflows
│ ├── docs.yml
│ ├── style.yml
│ ├── tests.yml
│ └── python-publish.yml
├── examples
├── alembic
│ ├── __init__.py
│ ├── migrations
│ │ ├── README
│ │ ├── script.py.mako
│ │ ├── versions
│ │ │ ├── d91d9200b65c_create_series_table.py
│ │ │ ├── 820b994ffa7c_create_seasons_table.py
│ │ │ └── 9085c679f5dc_create_episodes_table.py
│ │ └── env.py
│ ├── models.py
│ ├── README.md
│ └── alembic.ini
└── basic_example
│ ├── models.py
│ ├── fill_tables.py
│ └── example.py
├── MANIFEST.in
├── ydb_sqlalchemy
├── _version.py
├── __init__.py
└── sqlalchemy
│ ├── dml.py
│ ├── compiler
│ ├── __init__.py
│ ├── sa14.py
│ ├── sa20.py
│ └── base.py
│ ├── json.py
│ ├── test_sqlalchemy.py
│ ├── datetime_types.py
│ ├── requirements.py
│ ├── dbapi_adapter.py
│ ├── types.py
│ └── __init__.py
├── docs
├── requirements.txt
├── Makefile
├── api
│ └── index.rst
├── README.md
├── _static
│ └── logo.svg
├── installation.rst
├── index.rst
├── connection.rst
├── quickstart.rst
├── conf.py
├── types.rst
└── migrations.rst
├── requirements.txt
├── pyproject.toml
├── docker-compose.yml
├── test-requirements.txt
├── setup.cfg
├── wait_container_ready.py
├── setup.py
├── tox.ini
├── CHANGELOG.md
├── .gitignore
├── README.md
└── LICENSE
/test/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.github/scripts/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/examples/alembic/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include requirements.txt
2 |
--------------------------------------------------------------------------------
/ydb_sqlalchemy/_version.py:
--------------------------------------------------------------------------------
1 | VERSION = "0.1.14"
2 |
--------------------------------------------------------------------------------
/examples/alembic/migrations/README:
--------------------------------------------------------------------------------
1 | Generic single-database configuration.
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx_rtd_theme==2.0.0
2 | sphinx-copybutton==0.5.2
3 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | sqlalchemy >= 1.4.0, < 3.0.0
2 | ydb >= 3.21.6
3 | ydb-dbapi >= 0.1.16
4 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | line-length = 120
3 |
4 | [tool.isort]
5 | profile = "black"
6 |
--------------------------------------------------------------------------------
/ydb_sqlalchemy/__init__.py:
--------------------------------------------------------------------------------
1 | from ._version import VERSION # noqa: F401
2 | from ydb_dbapi import IsolationLevel # noqa: F401
3 | from .sqlalchemy import Upsert, types, upsert # noqa: F401
4 | import ydb_dbapi as dbapi
5 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.3"
2 | services:
3 | ydb:
4 | image: ydbplatform/local-ydb:trunk
5 | restart: always
6 | ports:
7 | - "2136:2136"
8 | hostname: localhost
9 | environment:
10 | - YDB_USE_IN_MEMORY_PDISKS=true
11 | - YDB_ENABLE_COLUMN_TABLES=true
12 |
--------------------------------------------------------------------------------
/test-requirements.txt:
--------------------------------------------------------------------------------
1 | pyyaml==5.3.1
2 | greenlet
3 |
4 | sqlalchemy==2.0.7
5 | ydb >= 3.18.8
6 | ydb-dbapi >= 0.1.1
7 | requests<2.29
8 | pytest==7.2.2
9 | docker==6.0.1
10 | docker-compose==1.29.2
11 | dockerpty==0.4.1
12 | flake8==3.9.2
13 | black==23.3.0
14 | pytest-cov
15 | pytest-asyncio
16 | isort==5.13.2
17 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [tool:pytest]
2 | addopts= --tb native -v -r fxX -p no:warnings
3 |
4 | [sqla_testing]
5 | requirement_cls=ydb_sqlalchemy.sqlalchemy.requirements:Requirements
6 | profile_file=test/profiles.txt
7 |
8 | [db]
9 | default=yql+ydb://localhost:2136/local
10 | ydb=yql+ydb://localhost:2136/local
11 | ydb_async=yql+ydb_async://localhost:2136/local
12 |
--------------------------------------------------------------------------------
/ydb_sqlalchemy/sqlalchemy/dml.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy as sa
2 |
3 |
class Upsert(sa.sql.Insert):
    """YQL ``UPSERT`` statement.

    Same construct as :class:`sqlalchemy.sql.Insert`, but compiled with a
    distinct visit name so the YQL compiler emits ``UPSERT`` instead of
    ``INSERT`` (see ``YqlCompiler.visit_upsert``).
    """

    __visit_name__ = "upsert"  # routes compilation to the dialect's visit_upsert()
    _propagate_attrs = {"compile_state_plugin": "yql"}  # select the "yql" CompileState plugin
    stringify_dialect = "yql"  # str(stmt) without a bound engine uses the yql dialect
    inherit_cache = False  # do not reuse Insert's compiled-statement cache keys
9 |
10 |
@sa.sql.base.CompileState.plugin_for("yql", "upsert")
class UpsertDMLState(sa.sql.dml.InsertDMLState):
    """Compile-state for :class:`Upsert`; InsertDMLState behavior is reused unchanged."""

    pass
14 |
--------------------------------------------------------------------------------
/.github/workflows/docs.yml:
--------------------------------------------------------------------------------
1 | name: Deploy Sphinx documentation to Github Pages
2 |
3 | on:
4 | push:
5 | branches: [main] # branch to trigger deployment
6 |
7 | jobs:
8 | pages:
9 | runs-on: ubuntu-latest
10 | environment:
11 | name: github-pages
12 | url: ${{ steps.deployment.outputs.page_url }}
13 | permissions:
14 | pages: write
15 | id-token: write
16 | steps:
17 | - id: deployment
18 | uses: sphinx-notes/pages@v3
--------------------------------------------------------------------------------
/ydb_sqlalchemy/sqlalchemy/compiler/__init__.py:
--------------------------------------------------------------------------------
"""Version-dependent compiler selection.

SQLAlchemy 1.4 and 2.x have incompatible compiler internals, so the concrete
YQL compiler classes are implemented twice (``sa14`` / ``sa20``) and the
matching set is re-exported from this package based on the installed version.
"""
import sqlalchemy as sa

sa_version = sa.__version__

if sa_version.startswith("2."):
    from .sa20 import YqlCompiler
    from .sa20 import YqlDDLCompiler
    from .sa20 import YqlTypeCompiler
    from .sa20 import YqlIdentifierPreparer
elif sa_version.startswith("1.4."):
    from .sa14 import YqlCompiler
    from .sa14 import YqlDDLCompiler
    from .sa14 import YqlTypeCompiler
    from .sa14 import YqlIdentifierPreparer
else:
    # Include the detected version so the failure is diagnosable from the traceback.
    raise RuntimeError(f"Unsupported SQLAlchemy version: {sa_version!r} (need 1.4.x or 2.x).")
17 |
--------------------------------------------------------------------------------
/test/conftest.py:
--------------------------------------------------------------------------------
import pytest
from sqlalchemy.dialects import registry

# Register every URL scheme the dialect answers to, so tests can be driven by
# "yql+ydb://", "yql+ydb_async://", "ydb://", "ydb_async://" or "yql://" URLs.
registry.register("yql.ydb", "ydb_sqlalchemy.sqlalchemy", "YqlDialect")
registry.register("yql.ydb_async", "ydb_sqlalchemy.sqlalchemy", "AsyncYqlDialect")
registry.register("ydb_async", "ydb_sqlalchemy.sqlalchemy", "AsyncYqlDialect")
registry.register("ydb", "ydb_sqlalchemy.sqlalchemy", "YqlDialect")
registry.register("yql", "ydb_sqlalchemy.sqlalchemy", "YqlDialect")
pytest.register_assert_rewrite("sqlalchemy.testing.assertions")

# NOTE(review): order matters — this import is deliberately kept after the
# registrations and assert-rewrite above; confirm before reordering.
from sqlalchemy.testing.plugin.pytestplugin import *  # noqa: E402, F401, F403
12 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SOURCEDIR = .
8 | BUILDDIR = .build
9 |
10 | # Put it first so that "make" without argument is like "make help".
11 | help:
12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
13 |
14 | .PHONY: help Makefile
15 |
16 | # Catch-all target: route all unknown targets to Sphinx using the new
17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
18 | %: Makefile
19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
20 |
--------------------------------------------------------------------------------
/.github/workflows/style.yml:
--------------------------------------------------------------------------------
1 | name: Style checks
2 |
3 | on:
4 | push:
5 | pull_request:
6 |
7 | jobs:
8 | build:
9 | runs-on: ubuntu-latest
10 | strategy:
11 | max-parallel: 4
12 | matrix:
13 | python-version: [3.9]
14 | environment: [style, black]
15 |
16 | steps:
17 | - uses: actions/checkout@v1
18 | - name: Set up Python ${{ matrix.python-version }}
19 | uses: actions/setup-python@v2
20 | with:
21 | python-version: ${{ matrix.python-version }}
22 | - name: Install dependencies
23 | run: |
24 | python -m pip install --upgrade pip
25 | pip install tox==4.2.6
26 | - name: Test with tox
27 | run: tox -e ${{ matrix.environment }}
28 |
--------------------------------------------------------------------------------
/examples/alembic/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | from alembic import op
11 | import sqlalchemy as sa
12 | ${imports if imports else ""}
13 |
14 | # revision identifiers, used by Alembic.
15 | revision: str = ${repr(up_revision)}
16 | down_revision: Union[str, None] = ${repr(down_revision)}
17 | branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
18 | depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
19 |
20 |
21 | def upgrade() -> None:
22 | ${upgrades if upgrades else "pass"}
23 |
24 |
25 | def downgrade() -> None:
26 | ${downgrades if downgrades else "pass"}
27 |
--------------------------------------------------------------------------------
/wait_container_ready.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import ydb
4 | import time
5 |
6 |
def wait_container_ready(driver):
    """Block until the YDB container accepts DDL; raise RuntimeError after ~30s."""
    driver.wait(timeout=30)

    with ydb.SessionPool(driver) as pool:
        deadline = time.time() + 30
        while time.time() < deadline:
            # Probe readiness by issuing a trivial scheme operation; any ydb.Error
            # means the server is not up yet, so back off and retry.
            try:
                with pool.checkout() as session:
                    session.execute_scheme("CREATE TABLE `.sys_health/test_table` (A Int32, PRIMARY KEY(A));")
            except ydb.Error:
                time.sleep(1)
            else:
                return True

        raise RuntimeError("Container is not ready after timeout.")
22 |
23 |
def main():
    """Connect to the docker-compose YDB instance (see docker-compose.yml: port 2136) and wait until it is usable."""
    with ydb.Driver(endpoint="localhost:2136", database="/local") as driver:
        wait_container_ready(driver)


if __name__ == "__main__":
    main()
31 |
--------------------------------------------------------------------------------
/.github/workflows/tests.yml:
--------------------------------------------------------------------------------
1 | name: Tests
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | pull_request:
8 |
9 | jobs:
10 | build:
11 | runs-on: ubuntu-latest
12 |
13 | concurrency:
14 | group: unit-${{ github.ref }}-${{ matrix.environment }}-${{ matrix.python-version }}-${{ matrix.folder }}
15 | cancel-in-progress: true
16 |
17 | strategy:
18 | fail-fast: false
19 | max-parallel: 4
20 | matrix:
21 | python-version: [3.9]
22 | environment: [test-unit, test-dialect]
23 |
24 | steps:
25 | - uses: actions/checkout@v1
26 | - name: Set up Python ${{ matrix.python-version }}
27 | uses: actions/setup-python@v2
28 | with:
29 | python-version: ${{ matrix.python-version }}
30 |
31 | - name: Install tox
32 | run: |
33 | python -m pip install --upgrade pip
34 | pip install tox==4.2.6
35 | - name: Run unit tests
36 | run: tox -e ${{ matrix.environment }}
37 |
--------------------------------------------------------------------------------
/examples/alembic/models.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy.orm as orm
2 | from sqlalchemy import Column, Integer, Unicode
3 |
4 | Base = orm.declarative_base()
5 |
6 |
class Series(Base):
    """A TV series; one row per series."""

    __tablename__ = "series"

    series_id = Column(Integer, primary_key=True)
    title = Column(Unicode)
    series_info = Column(Unicode)
    release_date = Column(Integer)  # NOTE(review): date stored as plain Integer; units not shown here
14 |
15 |
class Seasons(Base):
    """A season of a series; composite key (series_id, season_id)."""

    __tablename__ = "seasons"

    series_id = Column(Integer, primary_key=True)
    season_id = Column(Integer, primary_key=True)
    title = Column(Unicode)
    first_aired = Column(Integer)  # NOTE(review): dates stored as plain Integer; units not shown here
    last_aired = Column(Integer)
24 |
25 |
class Episodes(Base):
    """An episode; composite key (series_id, season_id, episode_id)."""

    __tablename__ = "episodes"

    series_id = Column(Integer, primary_key=True)
    season_id = Column(Integer, primary_key=True)
    episode_id = Column(Integer, primary_key=True)
    title = Column(Unicode)
    air_date = Column(Integer)  # NOTE(review): date stored as plain Integer; units not shown here
34 |
--------------------------------------------------------------------------------
/examples/basic_example/models.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy.orm as orm
2 | from sqlalchemy import Column, Integer, Unicode
3 |
4 | Base = orm.declarative_base()
5 |
6 |
class Series(Base):
    """A TV series; one row per series."""

    __tablename__ = "series"

    series_id = Column(Integer, primary_key=True)
    title = Column(Unicode)
    series_info = Column(Unicode)
    release_date = Column(Integer)  # NOTE(review): date stored as plain Integer; see fill_tables.py for units
14 |
15 |
class Seasons(Base):
    """A season of a series; composite key (series_id, season_id)."""

    __tablename__ = "seasons"

    series_id = Column(Integer, primary_key=True)
    season_id = Column(Integer, primary_key=True)
    title = Column(Unicode)
    first_aired = Column(Integer)  # NOTE(review): dates stored as plain Integer; see fill_tables.py for units
    last_aired = Column(Integer)
24 |
25 |
class Episodes(Base):
    """An episode; composite key (series_id, season_id, episode_id)."""

    __tablename__ = "episodes"

    series_id = Column(Integer, primary_key=True)
    season_id = Column(Integer, primary_key=True)
    episode_id = Column(Integer, primary_key=True)
    title = Column(Unicode)
    air_date = Column(Integer)  # NOTE(review): date stored as plain Integer; see fill_tables.py for units
34 |
--------------------------------------------------------------------------------
/.github/scripts/increment_version_test.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from .increment_version import VersionLine
4 |
@pytest.mark.parametrize(
    "source,inc_type,with_beta,result",
    [
        ("0.0.0", 'patch', False, "0.0.1"),
        ("0.0.1", 'patch', False, "0.0.2"),
        ("0.0.1b1", 'patch', False, "0.0.1"),
        ("0.0.0", 'patch', True, "0.0.1b1"),
        ("0.0.1", 'patch', True, "0.0.2b1"),
        ("0.0.2b1", 'patch', True, "0.0.2b2"),
        ("0.0.1", 'minor', False, "0.1.0"),
        ("0.0.1b1", 'minor', False, "0.1.0"),
        ("0.1.0b1", 'minor', False, "0.1.0"),
        ("0.1.0", 'minor', True, "0.2.0b1"),
        ("0.1.0b1", 'minor', True, "0.1.0b2"),
        ("0.1.1b1", 'minor', True, "0.2.0b1"),
        ("3.0.0b1", 'patch', True, "3.0.0b2"),
    ]
)
def test_increment_version(source, inc_type, with_beta, result):
    """Incrementing ``source`` by ``inc_type`` (optionally as a 'bN' beta) yields ``result``.

    Note from the cases: releasing a beta (with_beta=False on an 'bN' source)
    drops the beta suffix, and bumping an existing beta increments N only.
    """
    version = VersionLine("", source)
    version.increment(inc_type, with_beta)
    incremented = str(version)
    assert incremented == result
28 |
29 |
--------------------------------------------------------------------------------
/docs/api/index.rst:
--------------------------------------------------------------------------------
1 | API Reference
2 | =============
3 |
4 | This section contains the complete API reference for YDB SQLAlchemy.
5 |
6 | Core Module
7 | -----------
8 |
9 | .. automodule:: ydb_sqlalchemy.sqlalchemy
10 | :members:
11 | :undoc-members:
12 | :show-inheritance:
13 |
14 | Types Module
15 | ------------
16 |
17 | .. automodule:: ydb_sqlalchemy.sqlalchemy.types
18 | :members:
19 | :undoc-members:
20 | :show-inheritance:
21 |
22 | DateTime Types
23 | --------------
24 |
25 | .. automodule:: ydb_sqlalchemy.sqlalchemy.datetime_types
26 | :members:
27 | :undoc-members:
28 | :show-inheritance:
29 |
30 | JSON Types
31 | ----------
32 |
33 | .. automodule:: ydb_sqlalchemy.sqlalchemy.json
34 | :members:
35 | :undoc-members:
36 | :show-inheritance:
37 |
38 | Compiler Module
39 | ---------------
40 |
41 | .. automodule:: ydb_sqlalchemy.sqlalchemy.compiler
42 | :members:
43 | :undoc-members:
44 | :show-inheritance:
45 |
46 | DML Operations
47 | --------------
48 |
49 | .. automodule:: ydb_sqlalchemy.sqlalchemy.dml
50 | :members:
51 | :undoc-members:
52 | :show-inheritance:
53 |
--------------------------------------------------------------------------------
/examples/alembic/migrations/versions/d91d9200b65c_create_series_table.py:
--------------------------------------------------------------------------------
1 | """create series table
2 |
3 | Revision ID: d91d9200b65c
4 | Revises:
5 | Create Date: 2024-12-10 14:50:07.017763
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | from alembic import op
11 | import sqlalchemy as sa
12 |
13 |
14 | # revision identifiers, used by Alembic.
15 | revision: str = 'd91d9200b65c'
16 | down_revision: Union[str, None] = None
17 | branch_labels: Union[str, Sequence[str], None] = None
18 | depends_on: Union[str, Sequence[str], None] = None
19 |
20 |
def upgrade() -> None:
    """Create the `series` table (single integer primary key)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('series',
    sa.Column('series_id', sa.Integer(), nullable=False),
    sa.Column('title', sa.Unicode(), nullable=True),
    sa.Column('series_info', sa.Unicode(), nullable=True),
    sa.Column('release_date', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('series_id')
    )
    # ### end Alembic commands ###
31 |
32 |
def downgrade() -> None:
    """Drop the `series` table (reverses upgrade)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('series')
    # ### end Alembic commands ###
37 |
--------------------------------------------------------------------------------
/ydb_sqlalchemy/sqlalchemy/json.py:
--------------------------------------------------------------------------------
1 | from typing import Tuple, Union
2 |
3 | from sqlalchemy import types as sqltypes
4 |
5 |
class YqlJSON(sqltypes.JSON):
    """JSON type for YQL with path rendering suited to the dialect."""

    class YqlJSONPathType(sqltypes.JSON.JSONPathType):
        """Renders a tuple of path elements as a slash-delimited string."""

        def _format_value(self, value: Tuple[Union[str, int]]) -> str:
            # Leading "/" followed by "/{elem}" per element — identical to
            # building the string by repeated concatenation.
            return "/" + "".join(f"/{elem}" for elem in value)

        def _make_processor(self, string_proc):
            # Common shape of bind/literal processing: format the path tuple,
            # then apply the underlying string processor when one exists.
            def process(value: Tuple[Union[str, int]]):
                formatted = self._format_value(value)
                if string_proc:
                    formatted = string_proc(formatted)
                return formatted

            return process

        def bind_processor(self, dialect):
            return self._make_processor(self.string_bind_processor(dialect))

        def literal_processor(self, dialect):
            return self._make_processor(self.string_literal_processor(dialect))
35 |
--------------------------------------------------------------------------------
/ydb_sqlalchemy/sqlalchemy/test_sqlalchemy.py:
--------------------------------------------------------------------------------
1 | from datetime import date
2 | import sqlalchemy as sa
3 |
4 | from . import YqlDialect, types
5 |
6 |
def test_casts():
    """CAST and Lambda expressions compile to the expected YQL text."""
    dialect = YqlDialect()
    expr = sa.literal_column("1/2")

    res_exprs = [
        sa.cast(expr, types.UInt32),
        sa.cast(expr, types.UInt64),
        sa.cast(expr, types.UInt8),
        # Namespaced function call (String::JoinFromList) with a YQL lambda argument.
        sa.func.String.JoinFromList(
            sa.func.ListMap(sa.func.TOPFREQ(expr, 5), types.Lambda(lambda x: sa.cast(x, sa.Text))),
            ", ",
        ),
    ]

    # literal_binds inlines parameters so the rendered strings are self-contained.
    strs = [str(res_expr.compile(dialect=dialect, compile_kwargs={"literal_binds": True})) for res_expr in res_exprs]

    assert strs == [
        "CAST(1/2 AS UInt32)",
        "CAST(1/2 AS UInt64)",
        "CAST(1/2 AS UInt8)",
        "String::JoinFromList(ListMap(TOPFREQ(1/2, 5), ($x) -> { RETURN CAST($x AS UTF8) ;}), ', ')",
    ]
29 |
30 |
def test_ydb_types():
    """A Python ``date`` literal renders as a YQL ``Date('YYYY-MM-DD')`` expression."""
    dialect = YqlDialect()

    query = sa.literal(date(1996, 11, 19))
    compiled = query.compile(dialect=dialect, compile_kwargs={"literal_binds": True})

    assert str(compiled) == "Date('1996-11-19')"
38 |
--------------------------------------------------------------------------------
/test/test_inspect.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy as sa
2 | from sqlalchemy import Column, Integer, Numeric, Table, Unicode
3 | from sqlalchemy.testing.fixtures import TablesTest
4 |
5 |
class TestInspection(TablesTest):
    """Runtime inspection (``sa.inspect``) against a YDB-backed connection."""

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "test",
            metadata,
            Column("id", Integer, primary_key=True, nullable=False),
            Column("value", Unicode),
            Column("num", Numeric(22, 9)),
        )

    def test_get_columns(self, connection):
        inspect = sa.inspect(connection)

        columns = inspect.get_columns("test")
        # Compare by type class rather than instance, since type instances
        # do not compare equal.
        for c in columns:
            c["type"] = type(c["type"])

        assert columns == [
            {"name": "id", "type": sa.INTEGER, "nullable": False, "default": None},
            {"name": "value", "type": sa.TEXT, "nullable": True, "default": None},
            {"name": "num", "type": sa.DECIMAL, "nullable": True, "default": None},
        ]

    def test_has_table(self, connection):
        inspect = sa.inspect(connection)

        assert inspect.has_table("test")
        assert not inspect.has_table("foo")
35 |
--------------------------------------------------------------------------------
/examples/alembic/migrations/versions/820b994ffa7c_create_seasons_table.py:
--------------------------------------------------------------------------------
1 | """create seasons table
2 |
3 | Revision ID: 820b994ffa7c
4 | Revises: d91d9200b65c
5 | Create Date: 2024-12-10 14:50:44.633728
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | from alembic import op
11 | import sqlalchemy as sa
12 |
13 |
14 | # revision identifiers, used by Alembic.
15 | revision: str = '820b994ffa7c'
16 | down_revision: Union[str, None] = 'd91d9200b65c'
17 | branch_labels: Union[str, Sequence[str], None] = None
18 | depends_on: Union[str, Sequence[str], None] = None
19 |
20 |
def upgrade() -> None:
    """Create the `seasons` table (composite key: series_id, season_id)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('seasons',
    sa.Column('series_id', sa.Integer(), nullable=False),
    sa.Column('season_id', sa.Integer(), nullable=False),
    sa.Column('title', sa.Unicode(), nullable=True),
    sa.Column('first_aired', sa.Integer(), nullable=True),
    sa.Column('last_aired', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('series_id', 'season_id')
    )
    # ### end Alembic commands ###
32 |
33 |
def downgrade() -> None:
    """Drop the `seasons` table (reverses upgrade)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('seasons')
    # ### end Alembic commands ###
38 |
--------------------------------------------------------------------------------
/examples/alembic/migrations/versions/9085c679f5dc_create_episodes_table.py:
--------------------------------------------------------------------------------
1 | """create episodes table
2 |
3 | Revision ID: 9085c679f5dc
4 | Revises: 820b994ffa7c
5 | Create Date: 2024-12-10 14:51:18.441037
6 |
7 | """
8 | from typing import Sequence, Union
9 |
10 | from alembic import op
11 | import sqlalchemy as sa
12 |
13 |
14 | # revision identifiers, used by Alembic.
15 | revision: str = '9085c679f5dc'
16 | down_revision: Union[str, None] = '820b994ffa7c'
17 | branch_labels: Union[str, Sequence[str], None] = None
18 | depends_on: Union[str, Sequence[str], None] = None
19 |
20 |
def upgrade() -> None:
    """Create the `episodes` table (composite key: series_id, season_id, episode_id)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('episodes',
    sa.Column('series_id', sa.Integer(), nullable=False),
    sa.Column('season_id', sa.Integer(), nullable=False),
    sa.Column('episode_id', sa.Integer(), nullable=False),
    sa.Column('title', sa.Unicode(), nullable=True),
    sa.Column('air_date', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('series_id', 'season_id', 'episode_id')
    )
    # ### end Alembic commands ###
32 |
33 |
def downgrade() -> None:
    """Drop the `episodes` table (reverses upgrade)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('episodes')
    # ### end Alembic commands ###
38 |
--------------------------------------------------------------------------------
/ydb_sqlalchemy/sqlalchemy/compiler/sa14.py:
--------------------------------------------------------------------------------
1 | from typing import Union
2 | import sqlalchemy as sa
3 | import ydb
4 |
5 | from .base import (
6 | BaseYqlCompiler,
7 | BaseYqlDDLCompiler,
8 | BaseYqlIdentifierPreparer,
9 | BaseYqlTypeCompiler,
10 | )
11 |
12 |
class YqlTypeCompiler(BaseYqlTypeCompiler):
    """SA 1.4 type compiler for YQL."""

    # We use YDB Double for sa.Float for compatibility with old dialect version
    def visit_FLOAT(self, type_: sa.FLOAT, **kw):
        return "DOUBLE"

    def get_ydb_type(
        self, type_: sa.types.TypeEngine, is_optional: bool
    ) -> Union[ydb.PrimitiveType, ydb.AbstractTypeBuilder]:
        """Map an SQLAlchemy type to a YDB type; Float maps to Double (see note above)."""
        # Unwrap TypeDecorator down to its underlying implementation type.
        if isinstance(type_, sa.TypeDecorator):
            type_ = type_.impl

        if isinstance(type_, sa.Float):
            ydb_type = ydb.PrimitiveType.Double
            if is_optional:
                return ydb.OptionalType(ydb_type)
            return ydb_type

        # Everything else uses the shared base-class mapping.
        return super().get_ydb_type(type_, is_optional)
31 |
32 |
class YqlIdentifierPreparer(BaseYqlIdentifierPreparer):
    """SA 1.4 identifier preparer; inherits all behavior from the base class."""

    ...
35 |
36 |
class YqlCompiler(BaseYqlCompiler):
    """SA 1.4 statement compiler for YQL."""

    _type_compiler_cls = YqlTypeCompiler

    def visit_upsert(self, insert_stmt, **kw):
        # Compile as a normal INSERT, then rewrite only the leading keyword.
        return self.visit_insert(insert_stmt, **kw).replace("INSERT", "UPSERT", 1)
42 |
43 |
class YqlDDLCompiler(BaseYqlDDLCompiler):
    """SA 1.4 DDL compiler; inherits all behavior from the base class."""

    ...
46 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
import setuptools

# Read as UTF-8 explicitly so the build does not depend on the platform's
# default locale encoding (e.g. cp1252 on Windows).
with open("README.md", encoding="utf-8") as f:
    long_description = f.read()

# Runtime dependencies live in requirements.txt (single source of truth);
# keep every non-empty, stripped line.
with open("requirements.txt", encoding="utf-8") as f:
    requirements = [line.strip() for line in f if line.strip()]

setuptools.setup(
    name="ydb-sqlalchemy",
    version="0.1.14",  # AUTOVERSION
    description="YDB Dialect for SQLAlchemy",
    author="Yandex LLC",
    author_email="ydb@yandex-team.ru",
    url="http://github.com/ydb-platform/ydb-sqlalchemy",
    license="Apache 2.0",
    package_dir={"": "."},
    long_description=long_description,
    long_description_content_type="text/markdown",
    packages=setuptools.find_packages("."),
    classifiers=[
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.8",
    ],
    keywords="SQLAlchemy YDB YQL",
    install_requires=requirements,  # requirements.txt
    options={"bdist_wheel": {"universal": True}},
    extras_require={
        "yc": [
            "yandexcloud",
        ],
    },
    # URL-scheme -> dialect-class registrations picked up by SQLAlchemy.
    entry_points={
        "sqlalchemy.dialects": [
            "yql.ydb=ydb_sqlalchemy.sqlalchemy:YqlDialect",
            "yql.ydb_async=ydb_sqlalchemy.sqlalchemy:AsyncYqlDialect",
            "ydb_async=ydb_sqlalchemy.sqlalchemy:AsyncYqlDialect",
            "ydb=ydb_sqlalchemy.sqlalchemy:YqlDialect",
            "yql=ydb_sqlalchemy.sqlalchemy:YqlDialect",
        ]
    },
)
49 |
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 | # YDB SQLAlchemy Documentation
2 |
3 | This directory contains the documentation for YDB SQLAlchemy dialect.
4 |
5 | ## Building Documentation
6 |
7 | ### Prerequisites
8 |
9 | 1. Install Sphinx and required extensions:
10 | ```bash
11 | pip install sphinx sphinx-rtd-theme sphinx-copybutton
12 | ```
13 |
14 | ### Building HTML Documentation
15 |
16 | 1. Navigate to the docs directory:
17 | ```bash
18 | cd docs
19 | ```
20 |
21 | 2. Build the documentation:
22 | ```bash
23 | make html
24 | ```
25 |
26 | 3. Open the documentation in your browser:
27 | ```bash
28 | open .build/html/index.html
29 | ```
30 |
31 | ### Building Other Formats
32 |
33 | - **PDF**: `make latexpdf` (requires LaTeX)
34 | - **EPUB**: `make epub`
35 | - **Man pages**: `make man`
36 |
37 | ### Development
38 |
39 | When adding new documentation:
40 |
41 | 1. Create `.rst` files in the appropriate directory
42 | 2. Add them to the `toctree` in `index.rst`
43 | 3. Rebuild with `make html`
44 | 4. Check for warnings and fix them
45 |
46 | ### Structure
47 |
48 | - `index.rst` - Main documentation page
49 | - `installation.rst` - Installation guide
50 | - `quickstart.rst` - Quick start guide
51 | - `connection.rst` - Connection configuration
52 | - `types.rst` - Data types documentation
53 | - `migrations.rst` - Alembic migrations guide
54 | - `api/` - API reference documentation
55 | - `conf.py` - Sphinx configuration
56 | - `_static/` - Static files (images, CSS, etc.)
57 |
58 | ### Configuration
59 |
60 | The documentation is configured in `conf.py`. Key settings:
61 |
62 | - **Theme**: `sphinx_rtd_theme` (Read the Docs theme)
63 | - **Extensions**: autodoc, napoleon, intersphinx, copybutton
64 | - **Intersphinx**: Links to Python, SQLAlchemy, and Alembic docs
65 |
66 | ### Troubleshooting
67 |
68 | **Sphinx not found**: Make sure Sphinx is installed in your virtual environment
69 |
70 | **Import errors**: Ensure the YDB SQLAlchemy package is installed in the same environment
71 |
72 | **Theme issues**: Install `sphinx-rtd-theme` if you get theme-related errors
73 |
--------------------------------------------------------------------------------
/test/test_orm.py:
--------------------------------------------------------------------------------
1 | from types import MethodType
2 |
3 | import pytest
4 | import sqlalchemy as sa
5 | from sqlalchemy import Column, Integer, Unicode
6 | from sqlalchemy.orm import declarative_base, sessionmaker
7 | from sqlalchemy.testing.fixtures import TablesTest, config
8 |
9 |
class TestDirectories(TablesTest):
    """Tables living in YDB subdirectories ("dir/test") must work end-to-end.

    The second half of the test restores the dialect's old column-name
    handling and checks that the same UPDATE then fails — proving the
    current handling is what makes directory-qualified tables usable.
    """

    __backend__ = True

    def prepare_table(self, engine):
        # Declarative table whose name contains a directory separator.
        base = declarative_base()

        class Table(base):
            __tablename__ = "dir/test"
            id = Column(Integer, primary_key=True)
            text = Column(Unicode)

        base.metadata.create_all(engine)
        session = sessionmaker(bind=engine)()
        session.add(Table(id=2, text="foo"))
        session.commit()
        return base, Table, session

    def try_update(self, session, Table):
        # UPDATE of the single row; with the old behavior the '/' leaks into a
        # bind-parameter name and the server rejects it ("Unknown name: $dir").
        row = session.query(Table).first()
        row.text = "bar"
        session.commit()
        return row

    def drop_table(self, base, engine):
        base.metadata.drop_all(engine)

    def bind_old_method_to_dialect(self, dialect):
        # Monkey-patch the dialect back to a pass-through _handle_column_name.
        def _handle_column_name(self, variable):
            return variable

        dialect._handle_column_name = MethodType(_handle_column_name, dialect)

    def test_directories(self):
        # With the current dialect the update succeeds...
        engine_good = sa.create_engine(config.db_url)
        base, Table, session = self.prepare_table(engine_good)
        row = self.try_update(session, Table)
        assert row.id == 2
        assert row.text == "bar"
        self.drop_table(base, engine_good)

        # ...and with the old behavior restored it fails as expected.
        engine_bad = sa.create_engine(config.db_url)
        self.bind_old_method_to_dialect(engine_bad.dialect)
        base, Table, session = self.prepare_table(engine_bad)
        with pytest.raises(Exception) as excinfo:
            self.try_update(session, Table)
        assert "Unknown name: $dir" in str(excinfo.value)
        self.drop_table(base, engine_bad)
57 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = test,test-all,test-dialect,test-unit,black,black-format,style,coverage
3 | minversion = 4.2.6
4 | skipsdist = True
5 | ignore_basepython_conflict = true
6 |
7 | [testenv]
8 | usedevelop = True
9 | install_command = pip install {opts} {packages}
10 | setenv =
11 | PYTHONPATH = {env:PYTHONPATH}{:}{toxinidir}
12 | deps =
13 | -r{toxinidir}/test-requirements.txt
14 |
15 | [testenv:test]
16 | ignore_errors = True
17 | commands =
18 | docker-compose up -d
19 | python {toxinidir}/wait_container_ready.py
20 | pytest -v {posargs}
21 | docker-compose down
22 |
23 | [testenv:test-all]
24 | ignore_errors = True
25 | commands =
26 | docker-compose up -d
27 | python {toxinidir}/wait_container_ready.py
28 | pytest -v test --dbdriver ydb --dbdriver ydb_async
29 | pytest -v ydb_sqlalchemy
30 | docker-compose down -v
31 |
32 | [testenv:test-dialect]
33 | commands =
34 | docker-compose up -d
35 | python {toxinidir}/wait_container_ready.py
36 | pytest -v test --dbdriver ydb --dbdriver ydb_async
37 | docker-compose down -v
38 |
39 | [testenv:test-unit]
40 | commands =
41 | pytest -v {toxinidir}/ydb_sqlalchemy
42 |
43 | [testenv:coverage]
44 | ignore_errors = True
45 | commands =
46 | docker-compose up -d
47 | python {toxinidir}/wait_container_ready.py
48 | pytest -v --cov-report html:cov_html --cov=ydb_sqlalchemy {posargs}
49 | docker-compose down
50 |
51 | [testenv:black]
52 | skip_install = true
53 | commands =
54 | black --diff --check ydb_sqlalchemy examples/basic_example test
55 |
56 | [testenv:black-format]
57 | skip_install = true
58 | commands =
59 | black ydb_sqlalchemy examples/basic_example test
60 |
61 | [testenv:isort]
62 | skip_install = true
63 | commands =
64 | isort ydb_sqlalchemy examples/basic_example test
65 |
66 | [testenv:style]
67 | ignore_errors = True
68 | commands =
69 | flake8 ydb_sqlalchemy examples/basic_example test
70 |
71 | [flake8]
72 | show-source = true
73 | builtins = _
74 | max-line-length = 120
75 | ignore=E203,W503
76 | per-file-ignores =
77 | ydb_sqlalchemy/__init__.py: F401
78 | ydb_sqlalchemy/sqlalchemy/compiler/__init__.py: F401
79 | exclude=*_pb2.py,*_grpc.py,.venv,.git,.tox,dist,doc,*egg,docs/*
80 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## 0.1.14 ##
2 | * Add ability to propagate RetrySettings
3 |
4 | ## 0.1.13 ##
5 | * Add TupleType mapping support for YDB
6 |
7 | ## 0.1.12 ##
8 | * Fix sa 2.0.44 compatibility
9 |
10 | ## 0.1.11 ##
11 | * Date32, Datetime64 and Timestamp64 support
12 |
13 | ## 0.1.10 ##
14 | * YDB Decimal support
15 |
16 | ## 0.1.9 ##
17 | * Implement YDB specific concat
18 |
19 | ## 0.1.8 ##
20 | * Fix async cursor close method
21 |
22 | ## 0.1.7 ##
23 | * Fix async cursor fetch methods
24 |
25 | ## 0.1.6 ##
26 | * Bump ydb-dbapi version to 0.1.7
27 |
28 | ## 0.1.5 ##
29 | * Bump ydb-dbapi version
30 |
31 | ## 0.1.4 ##
32 | * Add slash to DB name
33 |
34 | ## 0.1.3 ##
35 | * Fix declare param_name cutting
36 |
37 | ## 0.1.2 ##
38 | * Bump DBAPI version
39 |
40 | ## 0.1.1 ##
41 | * sqlalchemy 1.4+ partial support
42 |
43 | ## 0.1.1b1 ##
44 | * Attempt to support sqlalchemy 1.4+
45 |
46 | ## 0.1.0 ##
47 | * Update DBAPI to QueryService
48 |
49 | ## 0.0.1b23 ##
50 | * Add request settings to execution options
51 |
52 | ## 0.0.1b22 ##
53 | * Get rid of logging queries in cursor
54 |
55 | ## 0.0.1b21 ##
56 | * Add support of DROP INDEX statement
57 |
58 | ## 0.0.1b20 ##
59 | * sqlalchemy's DATETIME type now rendered as YDB's Datetime instead of Timestamp
60 |
61 | ## 0.0.1b19 ##
62 | * Do not use set for columns in index, use dict (preserve order)
63 |
64 | ## 0.0.1b18 ##
65 | * Supprted scan query
66 | * Added use sqlalchemy cache query text internally
67 |
68 | ## 0.0.1b17 ##
69 | * Fixed false cache hit
70 |
71 | ## 0.0.1b16 ##
72 | * Added ydb_table_path_prefix parameter
73 |
74 | ## 0.0.1b15 ##
75 | * Added support of timezone
76 |
77 | ## 0.0.1b14 ##
78 | * Added secondary index support
79 |
80 | ## 0.0.1b13 ##
81 | * Added declare for yql statement variables (opt in) - temporary flag
82 |
83 | ## 0.0.1b12 ##
84 | * supported ydb connection credentials
85 |
86 | ## 0.0.1b11 ##
87 | * test release
88 |
89 | ## 0.0.1b10 ##
90 | * test release
91 |
92 | ## 0.0.1b9 ##
93 | * test release
94 |
95 | ## 0.0.1b8 ##
96 | * Improve publish script
97 |
98 | ## 0.0.1b6 ##
99 | * Fixed import version
100 |
101 | ## 0.0.1b5 ##
102 | * Initial version
103 |
--------------------------------------------------------------------------------
/ydb_sqlalchemy/sqlalchemy/datetime_types.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | from typing import Optional
3 |
4 | from sqlalchemy import types as sqltypes
5 |
6 |
class YqlDate(sqltypes.Date):
    """Date type whose literals are rendered through YQL's ``Date(...)`` constructor."""

    def literal_processor(self, dialect):
        inherited = super().literal_processor(dialect)

        def render(value):
            return "Date({})".format(inherited(value))

        return render
15 |
16 |
class YqlTimestamp(sqltypes.TIMESTAMP):
    """TIMESTAMP that attaches UTC tzinfo to fetched values when ``timezone`` is enabled."""

    def result_processor(self, dialect, coltype):
        def convert(value: Optional[datetime.datetime]) -> Optional[datetime.datetime]:
            # None and timezone-naive usage pass through untouched.
            if value is None or not self.timezone:
                return value
            return value.replace(tzinfo=datetime.timezone.utc)

        return convert
27 |
28 |
class YqlDateTime(YqlTimestamp, sqltypes.DATETIME):
    """DATETIME bound to the database as integer seconds since the Unix epoch."""

    def bind_processor(self, dialect):
        def convert(value: Optional[datetime.datetime]) -> Optional[int]:
            if value is None:
                return None
            # With timezone support off, naive datetimes are interpreted as UTC.
            aware = value if self.timezone else value.replace(tzinfo=datetime.timezone.utc)
            return int(aware.timestamp())

        return convert
39 |
40 |
class YqlDate32(YqlDate):
    """Wide-range date type; literals go through the ``Date32(...)`` constructor."""

    __visit_name__ = "date32"

    def literal_processor(self, dialect):
        # NOTE: super() resolves to YqlDate, so the inner value is itself
        # already wrapped — preserved as-is from the original implementation.
        inherited = super().literal_processor(dialect)

        def render(value):
            return "Date32({})".format(inherited(value))

        return render
51 |
52 |
class YqlTimestamp64(YqlTimestamp):
    """Wide-range timestamp; literals go through the ``Timestamp64(...)`` constructor."""

    __visit_name__ = "timestamp64"

    def literal_processor(self, dialect):
        inherited = super().literal_processor(dialect)

        def render(value):
            return "Timestamp64({})".format(inherited(value))

        return render
63 |
64 |
class YqlDateTime64(YqlDateTime):
    """Wide-range datetime; literals go through the ``DateTime64(...)`` constructor."""

    __visit_name__ = "datetime64"

    def literal_processor(self, dialect):
        inherited = super().literal_processor(dialect)

        def render(value):
            return "DateTime64({})".format(inherited(value))

        return render
75 |
--------------------------------------------------------------------------------
/docs/_static/logo.svg:
--------------------------------------------------------------------------------
1 |
5 |
--------------------------------------------------------------------------------
/docs/installation.rst:
--------------------------------------------------------------------------------
1 | Installation
2 | ============
3 |
4 | This guide covers the installation of YDB SQLAlchemy dialect and its dependencies.
5 |
6 | Requirements
7 | ------------
8 |
9 | * Python 3.7 or higher
10 | * SQLAlchemy 1.4+ or 2.0+ (recommended)
11 | * YDB Python SDK
12 |
13 | Installing from PyPI
14 | ---------------------
15 |
16 | The easiest way to install YDB SQLAlchemy is using pip:
17 |
18 | .. code-block:: bash
19 |
20 | pip install ydb-sqlalchemy
21 |
22 | This will install the YDB SQLAlchemy dialect along with all required dependencies.
23 |
24 | Installing from Source
25 | ----------------------
26 |
27 | If you want to install the latest development version or contribute to the project:
28 |
29 | 1. Clone the repository:
30 |
31 | .. code-block:: bash
32 |
33 | git clone https://github.com/ydb-platform/ydb-sqlalchemy.git
34 | cd ydb-sqlalchemy
35 |
36 | 2. Install in development mode:
37 |
38 | .. code-block:: bash
39 |
40 | pip install -e .
41 |
42 | Or install directly:
43 |
44 | .. code-block:: bash
45 |
46 | pip install .
47 |
48 |
49 | Verifying Installation
50 | ----------------------
51 |
52 | To verify that YDB SQLAlchemy is installed correctly:
53 |
54 | .. code-block:: python
55 |
56 | import ydb_sqlalchemy
57 | import sqlalchemy as sa
58 |
59 | # Check if the dialect is available
60 | engine = sa.create_engine("yql+ydb://localhost:2136/local")
61 | print("YDB SQLAlchemy installed successfully!")
62 |
63 | Docker Setup for Development
64 | -----------------------------
65 |
66 | For development and testing, you can use Docker to run a local YDB instance:
67 |
68 | 1. Clone the repository and navigate to the project directory
69 | 2. Start YDB using ``docker compose``:
70 |
71 | .. code-block:: bash
72 |
73 | docker compose up -d
74 |
75 | This will start a YDB instance accessible at ``localhost:2136``.
76 |
77 | Getting Help
78 | ~~~~~~~~~~~~
79 |
80 | If you encounter issues during installation:
81 |
1. Check the `GitHub Issues <https://github.com/ydb-platform/ydb-sqlalchemy/issues>`_
2. Review the `YDB documentation <https://ydb.tech/docs>`_
84 | 3. Create a new issue with detailed error information
85 |
86 | Next Steps
87 | ----------
88 |
89 | After successful installation, proceed to the :doc:`quickstart` guide to learn how to use YDB SQLAlchemy in your projects.
90 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
131 | # PyCharm
132 | .idea/
133 |
134 | # VSCode
135 | .vscode
136 |
137 | docs/.build/
138 | .DS_Store
--------------------------------------------------------------------------------
/examples/alembic/README.md:
--------------------------------------------------------------------------------
1 | # Alembic support
2 |
3 | In this example we'll see how to use `alembic` with ydb.
4 |
5 | ## Installation
6 |
7 | To make `alembic` work with `YDB` tables please follow these steps:
8 |
9 | * Install `ydb-sqlalchemy` package from PyPi:
10 |
11 | ```bash
   pip install ydb-sqlalchemy
13 | ```
14 |
15 | * Install `alembic` package from PyPi:
16 |
17 | ```bash
18 | pip install alembic
19 | ```
20 |
21 | ## Preparation
22 |
23 | We have to setup `alembic` correctly.
24 | First of all, we should register `YDB` dialect in `env.py`:
25 |
26 | ```python3
27 | from alembic.ddl.impl import DefaultImpl
28 |
29 |
30 | class YDBImpl(DefaultImpl):
31 | __dialect__ = "yql"
32 | ```
33 |
Secondly, since `YDB` does not support updating primary key columns, we have to update the alembic table structure.
35 | For this purpose we should update `run_migrations_online` method in `env.py`:
36 |
37 | ```python3
38 | def run_migrations_online() -> None:
39 | """Run migrations in 'online' mode.
40 |
41 | In this scenario we need to create an Engine
42 | and associate a connection with the context.
43 |
44 | """
45 | connectable = engine_from_config(
46 | config.get_section(config.config_ini_section, {}),
47 | prefix="sqlalchemy.",
48 | poolclass=pool.NullPool,
49 | )
50 |
51 | with connectable.connect() as connection:
52 | context.configure(
53 | connection=connection, target_metadata=target_metadata
54 | )
55 |
56 | ctx = context.get_context()
57 | ctx._version = sa.Table( # noqa: SLF001
58 | ctx.version_table,
59 | sa.MetaData(),
60 | sa.Column("version_num", sa.String(32), nullable=False),
61 | sa.Column("id", sa.Integer(), nullable=True, primary_key=True),
62 | )
63 |
64 | with context.begin_transaction():
65 | context.run_migrations()
66 | ```
67 |
68 | ## Example
69 |
70 | To run this example:
71 | 1. Install all dependencies described in `Installation` section.
72 | 1. Update `sqlalchemy.url` field in `alembic.ini` config file.
73 | 1. Run `alembic upgrade head` to apply all migrations:
74 |
75 | ```bash
76 | alembic upgrade head
77 |
78 | INFO [alembic.runtime.migration] Context impl YDBImpl.
79 | INFO [alembic.runtime.migration] Will assume non-transactional DDL.
80 | INFO [alembic.runtime.migration] Running upgrade -> d91d9200b65c, create series table
81 | INFO [alembic.runtime.migration] Running upgrade d91d9200b65c -> 820b994ffa7c, create seasons table
82 | INFO [alembic.runtime.migration] Running upgrade 820b994ffa7c -> 9085c679f5dc, create episodes table
83 | ```
84 |
85 | To create new migration just add a few changes in `models.py` and run:
86 | ```bash
87 | alembic revision --autogenerate -m "name of your migration"
88 | ```
89 |
--------------------------------------------------------------------------------
/examples/alembic/migrations/env.py:
--------------------------------------------------------------------------------
1 | from logging.config import fileConfig
2 |
3 | import sqlalchemy as sa
4 | from sqlalchemy import engine_from_config
5 | from sqlalchemy import pool
6 |
7 | from alembic import context
8 | from alembic.ddl.impl import DefaultImpl
9 |
10 |
# This is the Alembic Config object; it provides access to the values within
# the .ini file in use.
config = context.config

# Configure Python logging from the ini file (sets up loggers), if one was given.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Import the project's models so `alembic revision --autogenerate` can diff the
# database against their metadata.  The wildcard import is the conventional
# alembic pattern; `Base` must be defined in models.py.
from models import *
target_metadata = Base.metadata
27 |
28 | # other values from the config, defined by the needs of env.py,
29 | # can be acquired:
30 | # my_important_option = config.get_main_option("my_important_option")
31 | # ... etc.
32 |
33 |
class YDBImpl(DefaultImpl):
    """Register the 'yql' dialect with Alembic's DDL implementation registry."""

    __dialect__ = "yql"
36 |
37 |
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with just a URL (no Engine, no DBAPI needed);
    context.execute() calls emit the generated SQL to the script output.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
60 |
61 |
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the alembic config and runs migrations over a real
    connection.  Alembic's version table is replaced with one that carries an
    ``id`` primary key, because YDB does not allow updating primary-key columns.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        migration_ctx = context.get_context()
        migration_ctx._version = sa.Table(  # noqa: SLF001
            migration_ctx.version_table,
            sa.MetaData(),
            sa.Column("version_num", sa.String(32), nullable=False),
            sa.Column("id", sa.Integer(), nullable=True, primary_key=True),
        )

        with context.begin_transaction():
            context.run_migrations()
90 |
91 |
# Entry point: alembic executes this module and the mode is chosen from the CLI.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
96 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | YDB SQLAlchemy Documentation
2 | ============================
3 |
4 | Welcome to the YDB SQLAlchemy dialect documentation. This package provides a SQLAlchemy dialect for YDB (Yandex Database), allowing you to use SQLAlchemy ORM and Core with YDB databases.
5 |
6 | .. image:: https://img.shields.io/badge/License-Apache%202.0-blue.svg
7 | :target: https://github.com/ydb-platform/ydb-sqlalchemy/blob/main/LICENSE
8 | :alt: License
9 |
10 | .. image:: https://badge.fury.io/py/ydb-sqlalchemy.svg
11 | :target: https://badge.fury.io/py/ydb-sqlalchemy
12 | :alt: PyPI version
13 |
14 | .. image:: https://github.com/ydb-platform/ydb-sqlalchemy/actions/workflows/tests.yml/badge.svg
15 | :target: https://github.com/ydb-platform/ydb-sqlalchemy/actions/workflows/tests.yml
16 | :alt: Functional tests
17 |
18 | Overview
19 | --------
20 |
21 | YDB SQLAlchemy is a dialect that enables SQLAlchemy to work with YDB databases. It supports both SQLAlchemy 2.0 (fully tested) and SQLAlchemy 1.4 (partially tested).
22 |
23 | Key Features:
24 | ~~~~~~~~~~~~~
25 |
26 | * **SQLAlchemy 2.0 Support**: Full compatibility with the latest SQLAlchemy version
27 | * **Async/Await Support**: Full async support with ``yql+ydb_async`` dialect
28 | * **Core and ORM**: Support for both SQLAlchemy Core and ORM patterns
29 | * **Authentication**: Multiple authentication methods including static credentials, tokens, and service accounts
30 | * **Type System**: Comprehensive YDB type mapping to SQLAlchemy types
31 | * **Migrations**: Alembic integration for database schema migrations
32 | * **Pandas Integration**: Compatible with pandas DataFrame operations
33 |
34 | Quick Examples
35 | ~~~~~~~~~~~~~~
36 |
37 | **Synchronous:**
38 |
39 | .. code-block:: python
40 |
41 | import sqlalchemy as sa
42 |
43 | # Create engine
44 | engine = sa.create_engine("yql+ydb://localhost:2136/local")
45 |
46 | # Execute query
47 | with engine.connect() as conn:
48 | result = conn.execute(sa.text("SELECT 1 AS value"))
49 | print(result.fetchone())
50 |
51 | **Asynchronous:**
52 |
53 | .. code-block:: python
54 |
55 | import asyncio
56 | from sqlalchemy.ext.asyncio import create_async_engine
57 |
58 | async def main():
59 | # Create async engine
60 | engine = create_async_engine("yql+ydb_async://localhost:2136/local")
61 |
62 | # Execute query
63 | async with engine.connect() as conn:
64 | result = await conn.execute(sa.text("SELECT 1 AS value"))
65 | print(await result.fetchone())
66 |
67 | asyncio.run(main())
68 |
69 | Table of Contents
70 | -----------------
71 |
72 | .. toctree::
73 | :maxdepth: 2
74 | :caption: User Guide
75 |
76 | installation
77 | quickstart
78 | connection
79 | types
80 | migrations
81 |
82 | .. toctree::
83 | :maxdepth: 2
84 | :caption: API Reference
85 |
86 | api/index
87 |
88 | Indices and tables
89 | ==================
90 |
91 | * :ref:`genindex`
92 | * :ref:`modindex`
93 | * :ref:`search`
94 |
--------------------------------------------------------------------------------
/ydb_sqlalchemy/sqlalchemy/requirements.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy.testing import exclusions
2 | from sqlalchemy.testing.requirements import SuiteRequirements
3 |
4 |
class Requirements(SuiteRequirements):
    """Feature switches telling sqlalchemy's test suite what YDB supports.

    ``exclusions.open()`` enables the corresponding tests; ``exclusions.closed()``
    skips them.  Grouped below into supported vs. unsupported capabilities.
    """

    # ---- capabilities YDB provides -----------------------------------------

    @property
    def json_type(self):
        return exclusions.open()

    @property
    def uuid_data_type(self):
        return exclusions.open()

    @property
    def regexp_match(self):
        return exclusions.open()

    @property
    def table_value_constructor(self):
        return exclusions.open()

    @property
    def timestamp_microseconds(self):
        return exclusions.open()

    @property
    def mod_operator_as_percent_sign(self):
        return exclusions.open()

    # ---- capabilities YDB lacks --------------------------------------------

    @property
    def array_type(self):
        return exclusions.closed()

    @property
    def foreign_keys(self):
        """YDB has no foreign keys."""
        return exclusions.closed()

    @property
    def self_referential_foreign_keys(self):
        return exclusions.closed()

    @property
    def foreign_key_ddl(self):
        return exclusions.closed()

    @property
    def foreign_key_constraint_reflection(self):
        return exclusions.closed()

    @property
    def temp_table_reflection(self):
        return exclusions.closed()

    @property
    def temporary_tables(self):
        return exclusions.closed()

    @property
    def temporary_views(self):
        return exclusions.closed()

    @property
    def index_reflection(self):
        """Index reflection is supported only with limits, so it stays disabled here."""
        return exclusions.closed()

    @property
    def view_reflection(self):
        return exclusions.closed()

    @property
    def unique_constraint_reflection(self):
        return exclusions.closed()

    @property
    def insert_returning(self):
        return exclusions.closed()

    @property
    def autoincrement_insert(self):
        """YDB has no autoincrement columns."""
        return exclusions.closed()

    @property
    def autoincrement_without_sequence(self):
        """YDB has no autoincrement columns."""
        return exclusions.closed()

    @property
    def duplicate_names_in_cursor_description(self):
        return exclusions.closed()

    @property
    def named_constraints(self):
        return exclusions.closed()

    @property
    def parens_in_union_contained_select_w_limit_offset(self):
        """UNION with parenthesized branches does not work in YQL."""
        return exclusions.closed()

    @property
    def parens_in_union_contained_select_wo_limit_offset(self):
        """UNION with parenthesized branches does not work in YQL."""
        return exclusions.closed()
107 |
--------------------------------------------------------------------------------
/examples/basic_example/fill_tables.py:
--------------------------------------------------------------------------------
1 | import iso8601
2 | import sqlalchemy as sa
3 | from models import Base, Episodes, Seasons, Series
4 |
5 |
def to_days(date):
    """Return the number of days between *date* and the Unix epoch (1970-01-01).

    *date* is a ``"YYYY-MM-DD"`` string; zero-padding of month/day is optional
    (the call sites use both ``"2006-02-03"`` and ``"1970-1-1"`` styles).

    Uses the standard library instead of the third-party ``iso8601`` package:
    plain date subtraction already yields a ``timedelta`` with the day count.
    """
    from datetime import date as _date

    year, month, day = (int(part) for part in date.split("-"))
    return (_date(year, month, day) - _date(1970, 1, 1)).days
9 |
10 |
def fill_series(conn):
    """Insert the demo rows into the series table.

    Each tuple is (id, title, description, release date in days since the
    epoch); positional order must match the column order of the ``Series``
    model — see models.py.
    """
    data = [
        (
            1,
            "IT Crowd",
            "The IT Crowd is a British sitcom produced by Channel 4, written by Graham Linehan, produced by "
            "Ash Atalla and starring Chris O'Dowd, Richard Ayoade, Katherine Parkinson, and Matt Berry.",
            to_days("2006-02-03"),
        ),
        (
            2,
            "Silicon Valley",
            "Silicon Valley is an American comedy television series created by Mike Judge, John Altschuler and "
            "Dave Krinsky. The series focuses on five young men who founded a startup company in Silicon Valley.",
            to_days("2014-04-06"),
        ),
    ]
    conn.execute(sa.insert(Series).values(data))
29 |
30 |
def fill_seasons(conn):
    """Insert the demo rows into the seasons table.

    Each tuple is (series id, season number, title, first-aired days,
    last-aired days), dates expressed as days since the epoch (``to_days``);
    positional order must match the ``Seasons`` model — see models.py.
    """
    data = [
        (1, 1, "Season 1", to_days("2006-02-03"), to_days("2006-03-03")),
        (1, 2, "Season 2", to_days("2007-08-24"), to_days("2007-09-28")),
        (1, 3, "Season 3", to_days("2008-11-21"), to_days("2008-12-26")),
        (1, 4, "Season 4", to_days("2010-06-25"), to_days("2010-07-30")),
        (2, 1, "Season 1", to_days("2014-04-06"), to_days("2014-06-01")),
        (2, 2, "Season 2", to_days("2015-04-12"), to_days("2015-06-14")),
        (2, 3, "Season 3", to_days("2016-04-24"), to_days("2016-06-26")),
        (2, 4, "Season 4", to_days("2017-04-23"), to_days("2017-06-25")),
        (2, 5, "Season 5", to_days("2018-03-25"), to_days("2018-05-13")),
    ]
    conn.execute(sa.insert(Seasons).values(data))
44 |
45 |
def fill_episodes(conn):
    """Insert the demo rows into the episodes table.

    Each tuple is (series id, season number, episode number, title, air date in
    days since the epoch); positional order must match the ``Episodes`` model —
    see models.py.
    """
    data = [
        (1, 1, 1, "Yesterday's Jam", to_days("2006-02-03")),
        (1, 1, 2, "Calamity Jen", to_days("2006-02-03")),
        (1, 1, 3, "Fifty-Fifty", to_days("2006-02-10")),
        (1, 1, 4, "The Red Door", to_days("2006-02-17")),
        (1, 1, 5, "The Haunting of Bill Crouse", to_days("2006-02-24")),
        (1, 1, 6, "Aunt Irma Visits", to_days("2006-03-03")),
        # Fixed: season 2 premiered 2007-08-24 (see fill_seasons); "2006" was a typo.
        (1, 2, 1, "The Work Outing", to_days("2007-08-24")),
        (1, 2, 2, "Return of the Golden Child", to_days("2007-08-31")),
        (1, 2, 3, "Moss and the German", to_days("2007-09-07")),
        (1, 2, 4, "The Dinner Party", to_days("2007-09-14")),
        (1, 2, 5, "Smoke and Mirrors", to_days("2007-09-21")),
        (1, 2, 6, "Men Without Women", to_days("2007-09-28")),
        (1, 3, 1, "From Hell", to_days("2008-11-21")),
        (1, 3, 2, "Are We Not Men?", to_days("2008-11-28")),
        (1, 3, 3, "Tramps Like Us", to_days("2008-12-05")),
        (1, 3, 4, "The Speech", to_days("2008-12-12")),
        (1, 3, 5, "Friendface", to_days("2008-12-19")),
        (1, 3, 6, "Calendar Geeks", to_days("2008-12-26")),
        (1, 4, 1, "Jen The Fredo", to_days("2010-06-25")),
        (1, 4, 2, "The Final Countdown", to_days("2010-07-02")),
        (1, 4, 3, "Something Happened", to_days("2010-07-09")),
        (1, 4, 4, "Italian For Beginners", to_days("2010-07-16")),
        (1, 4, 5, "Bad Boys", to_days("2010-07-23")),
        (1, 4, 6, "Reynholm vs Reynholm", to_days("2010-07-30")),
    ]
    conn.execute(sa.insert(Episodes).values(data))
74 |
75 |
def fill_all_tables(conn):
    """Recreate the schema from scratch and load all demo data."""
    engine = conn.engine
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)

    for loader in (fill_series, fill_seasons, fill_episodes):
        loader(conn)
83 |
--------------------------------------------------------------------------------
/ydb_sqlalchemy/sqlalchemy/compiler/sa20.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy as sa
2 | import ydb
3 |
4 | from sqlalchemy.exc import CompileError
5 | from sqlalchemy.sql import literal_column
6 | from sqlalchemy.util.compat import inspect_getfullargspec
7 |
8 | from .base import (
9 | BaseYqlCompiler,
10 | BaseYqlDDLCompiler,
11 | BaseYqlIdentifierPreparer,
12 | BaseYqlTypeCompiler,
13 | )
14 | from typing import Union
15 |
16 |
class YqlTypeCompiler(BaseYqlTypeCompiler):
    """Type compiler additions for sqlalchemy-2.0-only types (Uuid, Double)."""

    def visit_uuid(self, type_: sa.Uuid, **kw):
        # UUIDs are stored as text columns.
        return "UTF8"

    def get_ydb_type(
        self, type_: sa.types.TypeEngine, is_optional: bool
    ) -> Union[ydb.PrimitiveType, ydb.AbstractTypeBuilder]:
        if isinstance(type_, sa.TypeDecorator):
            type_ = type_.impl

        # sa 2.0 types handled here; everything else falls through to the base.
        mapping = (
            (sa.Uuid, ydb.PrimitiveType.Utf8),
            (sa.Double, ydb.PrimitiveType.Double),
        )
        for sa_type, ydb_type in mapping:
            if isinstance(type_, sa_type):
                return ydb.OptionalType(ydb_type) if is_optional else ydb_type

        return super().get_ydb_type(type_, is_optional)
40 |
41 |
class YqlIdentifierPreparer(BaseYqlIdentifierPreparer):
    # No sa20-specific quoting rules; all behavior lives in the shared base class.
    ...
44 |
45 |
class YqlCompiler(BaseYqlCompiler):
    """SQL statement compiler (sqlalchemy 2.0): JSON access, REGEXP, lambdas, UPSERT."""

    _type_compiler_cls = YqlTypeCompiler

    def visit_json_getitem_op_binary(self, binary: sa.BinaryExpression, operator, **kw) -> str:
        """Render ``col[index]`` on a JSON column via a Yson conversion to the target type."""
        json_field = self.process(binary.left, **kw)
        index = self.process(binary.right, **kw)
        return self._yson_convert_to(f"{json_field}[{index}]", binary.type)

    def visit_json_path_getitem_op_binary(self, binary: sa.BinaryExpression, operator, **kw) -> str:
        """Render JSON path access through ``Yson::YPath``."""
        json_field = self.process(binary.left, **kw)
        path = self.process(binary.right, **kw)
        return self._yson_convert_to(f"Yson::YPath({json_field}, {path})", binary.type)

    def visit_regexp_match_op_binary(self, binary, operator, **kw):
        # YQL spells regexp matching as an infix REGEXP operator.
        return self._generate_generic_binary(binary, " REGEXP ", **kw)

    def visit_not_regexp_match_op_binary(self, binary, operator, **kw):
        return self._generate_generic_binary(binary, " NOT REGEXP ", **kw)

    def visit_lambda(self, lambda_, **kw):
        """Compile a Python lambda into a YQL lambda literal.

        Only plain positional arguments are supported; *args/**kwargs have no
        YQL equivalent and raise CompileError.
        """
        func = lambda_.func
        spec = inspect_getfullargspec(func)

        if spec.varargs:
            raise CompileError("Lambdas with *args are not supported")
        if spec.varkw:
            raise CompileError("Lambdas with **kwargs are not supported")

        # Each Python argument becomes a `$name` YQL variable; the lambda body
        # is obtained by calling the Python function on those column expressions.
        args = [literal_column("$" + arg) for arg in spec.args]
        text = f'({", ".join("$" + arg for arg in spec.args)}) -> ' f"{{ RETURN {self.process(func(*args), **kw)} ;}}"

        return text

    def _yson_convert_to(self, statement: str, target_type: sa.types.TypeEngine) -> str:
        """Wrap *statement* in a ``Yson::ConvertTo`` cast to *target_type*."""
        type_name = target_type.compile(self.dialect)
        if isinstance(target_type, sa.Numeric) and not isinstance(target_type, (sa.Float, sa.Double)):
            # Since Decimal is stored in JSON either as String or as Float,
            # go through an Optional<String> first, then CAST to the target type.
            # (Restored `Optional<String>` — a bare `Optional` is not a valid YQL type.)
            string_value = f"Yson::ConvertTo({statement}, Optional<String>, Yson::Options(true AS AutoConvert))"
            return f"CAST({string_value} AS Optional<{type_name}>)"
        return f"Yson::ConvertTo({statement}, Optional<{type_name}>)"

    def visit_upsert(self, insert_stmt, visited_bindparam=None, **kw):
        """Render UPSERT by compiling as INSERT and swapping the leading keyword."""
        return self.visit_insert(insert_stmt, visited_bindparam, **kw).replace("INSERT", "UPSERT", 1)
89 |
90 |
class YqlDDLCompiler(BaseYqlDDLCompiler):
    # No sa20-specific DDL behavior; everything lives in the shared base class.
    ...
93 |
--------------------------------------------------------------------------------
/ydb_sqlalchemy/sqlalchemy/dbapi_adapter.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy.engine.interfaces import AdaptedConnection
2 |
3 | from sqlalchemy.util.concurrency import await_only
4 | from ydb_dbapi import AsyncConnection, AsyncCursor
5 | import ydb
6 |
7 |
class AdaptedAsyncConnection(AdaptedConnection):
    """Synchronous facade over ``ydb_dbapi.AsyncConnection``.

    SQLAlchemy's asyncio integration drives the synchronous dialect inside a
    greenlet; each coroutine of the wrapped connection is resolved with
    ``await_only`` so the dialect can call these methods as if they blocked.
    Non-coroutine members are delegated directly.
    """

    def __init__(self, connection: AsyncConnection):
        self._connection: AsyncConnection = connection

    # -- read-only views of the wrapped connection's internals (used by the dialect) --

    @property
    def _driver(self):
        return self._connection._driver

    @property
    def _session_pool(self):
        return self._connection._session_pool

    @property
    def _tx_context(self):
        return self._connection._tx_context

    @property
    def _tx_mode(self):
        return self._connection._tx_mode

    @property
    def interactive_transaction(self):
        return self._connection.interactive_transaction

    def cursor(self):
        # Cursors need the same async -> sync adaptation as the connection.
        return AdaptedAsyncCursor(self._connection.cursor())

    # -- transaction control: coroutines bridged via await_only --

    def begin(self):
        return await_only(self._connection.begin())

    def commit(self):
        return await_only(self._connection.commit())

    def rollback(self):
        return await_only(self._connection.rollback())

    def close(self):
        return await_only(self._connection.close())

    # -- plain synchronous pass-throughs --

    def set_isolation_level(self, level):
        return self._connection.set_isolation_level(level)

    def get_isolation_level(self):
        return self._connection.get_isolation_level()

    def set_ydb_request_settings(self, value: ydb.BaseRequestSettings) -> None:
        self._connection.set_ydb_request_settings(value)

    def get_ydb_request_settings(self) -> ydb.BaseRequestSettings:
        return self._connection.get_ydb_request_settings()

    def set_ydb_retry_settings(self, value: ydb.RetrySettings) -> None:
        self._connection.set_ydb_retry_settings(value)

    def get_ydb_retry_settings(self) -> ydb.RetrySettings:
        return self._connection.get_ydb_retry_settings()

    # -- schema helpers used for reflection: coroutines bridged via await_only --

    def describe(self, table_path: str):
        return await_only(self._connection.describe(table_path))

    def check_exists(self, table_path: str):
        return await_only(self._connection.check_exists(table_path))

    def get_table_names(self):
        return await_only(self._connection.get_table_names())
73 |
74 |
75 | # TODO(vgvoleg): Migrate to AsyncAdapt_dbapi_cursor and AsyncAdapt_dbapi_connection
class AdaptedAsyncCursor:
    """Synchronous facade over ``ydb_dbapi.AsyncCursor``.

    The ``execute*`` methods are coroutines on the wrapped cursor and are
    bridged with ``await_only``; the ``fetch*`` and ``close`` methods are
    called directly, so they are expected to be plain synchronous methods on
    ``AsyncCursor`` — presumably results are already buffered; confirm
    against ydb_dbapi.
    """

    # Signals that close() needs no await bridging.
    # NOTE(review): presumably consulted by the dialect/adapter layer — confirm.
    _awaitable_cursor_close: bool = False

    def __init__(self, cursor: AsyncCursor):
        # The wrapped async DBAPI cursor.
        self._cursor = cursor

    @property
    def description(self):
        # DBAPI: column descriptions of the last result set.
        return self._cursor.description

    @property
    def arraysize(self):
        # DBAPI: default row count used by fetchmany().
        return self._cursor.arraysize

    @arraysize.setter
    def arraysize(self, size: int) -> None:
        self._cursor.arraysize = size

    @property
    def rowcount(self):
        return self._cursor.rowcount

    def fetchone(self):
        return self._cursor.fetchone()

    def fetchmany(self, size=None):
        return self._cursor.fetchmany(size=size)

    def fetchall(self):
        return self._cursor.fetchall()

    def execute_scheme(self, sql, parameters=None):
        # YDB extension: execute a scheme (DDL) statement.
        return await_only(self._cursor.execute_scheme(sql, parameters))

    def execute(self, sql, parameters=None):
        return await_only(self._cursor.execute(sql, parameters))

    def executemany(self, sql, parameters=None):
        return await_only(self._cursor.executemany(sql, parameters))

    def close(self):
        # Not awaited — see _awaitable_cursor_close above.
        return self._cursor.close()

    def setinputsizes(self, *args):
        # DBAPI-required no-op.
        pass

    def setoutputsizes(self, *args):
        # DBAPI-required no-op.
        # NOTE(review): PEP 249 names this ``setoutputsize`` (singular) —
        # confirm which name callers actually invoke before renaming.
        pass

    async def _async_soft_close(self) -> None:
        # Compatibility hook (see the TODO above about AsyncAdapt_dbapi_cursor);
        # nothing to release here.
        pass
127 |
--------------------------------------------------------------------------------
/.github/workflows/python-publish.yml:
--------------------------------------------------------------------------------
1 | # This workflow will upload a Python Package using Twine when a release is created
2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
3 |
4 | # This workflow uses actions that are not certified by GitHub.
5 | # They are provided by a third-party and are governed by
6 | # separate terms of service, privacy policy, and support
7 | # documentation.
8 |
9 | name: Publish package release
10 |
11 | on:
12 | workflow_dispatch:
13 | inputs:
14 | version-change:
15 | description: Version part
16 | required: true
17 | type: choice
18 | default: patch
19 | options:
20 | - minor
21 | - patch
22 | beta:
23 | description: Is beta version
24 | required: true
25 | type: boolean
26 | default: False
27 | jobs:
28 | publish:
29 | env:
30 | VERSION_CHANGE: ${{ github.event.inputs.version-change }}
31 | WITH_BETA: ${{ github.event.inputs.beta }}
32 | GH_TOKEN: ${{ secrets.YDB_PLATFORM_BOT_TOKEN_REPO }}
33 | CHANGELOG_FILE: CHANGELOG.md
34 | SETUP_PY_PATH: setup.py
35 |
36 | permissions:
37 | contents: write
38 | id-token: write # IMPORTANT: this permission is mandatory for trusted publishing
39 |
40 | runs-on: ubuntu-latest
41 |
42 | steps:
43 | - uses: actions/checkout@v3
44 | with:
45 | token: ${{ secrets.YDB_PLATFORM_BOT_TOKEN_REPO }}
46 |
47 | - name: Set up Python
48 | uses: actions/setup-python@v3
49 | with:
50 | python-version: '3.8'
51 |
52 | - name: Install dependencies
53 | run: |
54 | python -m pip install --upgrade pip
55 | pip install build
56 |
57 | - name: read changelog
58 | id: read-changelog
59 | run: |
60 | CHANGELOG=$(cat $CHANGELOG_FILE | sed -e '/^## .*$/,$d')
61 |           echo "CHANGELOG<<CHANGELOGEOF_MARKER" >> $GITHUB_ENV
62 | echo "$CHANGELOG" >> $GITHUB_ENV
63 | echo "CHANGELOGEOF_MARKER" >> $GITHUB_ENV
64 | echo "# Changelog" >> $GITHUB_STEP_SUMMARY
65 | echo "$CHANGELOG" >> $GITHUB_STEP_SUMMARY
66 |
67 |
68 | - name: Increment version
69 | id: increment-version
70 | run: |
71 | NEW_VERSION=$(python3 ./.github/scripts/increment_version.py --inc-type=$VERSION_CHANGE --beta=$WITH_BETA)
72 | echo new version: $NEW_VERSION
73 | echo "NEW_VERSION=$NEW_VERSION" >> $GITHUB_OUTPUT
74 | echo "New version: $NEW_VERSION" >> $GITHUB_STEP_SUMMARY
75 |
76 | - name: Build package
77 | run: python -m build
78 |
79 | - name: Publish release on github
80 | run: |
81 | if [[ -z "$CHANGELOG" ]]
82 | then
83 | echo "CHANGELOG empty"
84 | exit 1;
85 | fi;
86 |
87 | TAG="${{ steps.increment-version.outputs.NEW_VERSION }}"
88 |
89 | # Get previous version from changelog
90 | # pre-incremented version not used for consistent changelog with release notes
91 | # for example changelog may be rewrited when switch from beta to release
92 | # and remove internal beta changes
93 | LAST_TAG=$(cat $CHANGELOG_FILE | grep '^## .* ##$' | head -n 2 | tail -n 1 | cut -d ' ' -f 2)
94 |
95 | git config --global user.email "robot@umbrella";
96 | git config --global user.name "robot";
97 | git commit -am "Release: $TAG";
98 |
99 | git tag "$TAG"
100 | git push && git push --tags
101 |
102 | CHANGELOG="$CHANGELOG
103 |
104 | Full Changelog: [$LAST_TAG...$TAG](https://github.com/ydb-platform/ydb-sqlalchemy/compare/$LAST_TAG...$TAG)"
105 | if [ "$WITH_BETA" = true ]
106 | then
107 | gh release create --prerelease $TAG --title "$TAG" --notes "$CHANGELOG"
108 | else
109 | gh release create $TAG --title "$TAG" --notes "$CHANGELOG"
110 | fi;
111 |
112 | - name: Publish package
113 | uses: pypa/gh-action-pypi-publish@release/v1.8
114 |
--------------------------------------------------------------------------------
/examples/alembic/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # path to migration scripts
5 | # Use forward slashes (/) also on windows to provide an os agnostic path
6 | script_location = migrations
7 |
8 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
9 | # Uncomment the line below if you want the files to be prepended with date and time
10 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
11 | # for all available tokens
12 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
13 |
14 | # sys.path path, will be prepended to sys.path if present.
15 | # defaults to the current working directory.
16 | prepend_sys_path = .
17 |
18 | # timezone to use when rendering the date within the migration file
19 | # as well as the filename.
20 | # If specified, requires the python>=3.9 or backports.zoneinfo library.
21 | # Any required deps can installed by adding `alembic[tz]` to the pip requirements
22 | # string value is passed to ZoneInfo()
23 | # leave blank for localtime
24 | # timezone =
25 |
26 | # max length of characters to apply to the "slug" field
27 | # truncate_slug_length = 40
28 |
29 | # set to 'true' to run the environment during
30 | # the 'revision' command, regardless of autogenerate
31 | # revision_environment = false
32 |
33 | # set to 'true' to allow .pyc and .pyo files without
34 | # a source .py file to be detected as revisions in the
35 | # versions/ directory
36 | # sourceless = false
37 |
38 | # version location specification; This defaults
39 | # to ./versions. When using multiple version
40 | # directories, initial revisions must be specified with --version-path.
41 | # The path separator used here should be the separator specified by "version_path_separator" below.
42 | # version_locations = %(here)s/bar:%(here)s/bat:./versions
43 |
44 | # version path separator; As mentioned above, this is the character used to split
45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
47 | # Valid values for version_path_separator are:
48 | #
49 | # version_path_separator = :
50 | # version_path_separator = ;
51 | # version_path_separator = space
52 | # version_path_separator = newline
53 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
54 |
55 | # set to 'true' to search source files recursively
56 | # in each "version_locations" directory
57 | # new in Alembic version 1.10
58 | # recursive_version_locations = false
59 |
60 | # the output encoding used when revision files
61 | # are written from script.py.mako
62 | # output_encoding = utf-8
63 |
64 | sqlalchemy.url = yql+ydb://localhost:2136/local
65 |
66 |
67 | [post_write_hooks]
68 | # post_write_hooks defines scripts or Python functions that are run
69 | # on newly generated revision scripts. See the documentation for further
70 | # detail and examples
71 |
72 | # format using "black" - use the console_scripts runner, against the "black" entrypoint
73 | # hooks = black
74 | # black.type = console_scripts
75 | # black.entrypoint = black
76 | # black.options = -l 79 REVISION_SCRIPT_FILENAME
77 |
78 | # lint with attempts to fix using "ruff" - use the exec runner, execute a binary
79 | # hooks = ruff
80 | # ruff.type = exec
81 | # ruff.executable = %(here)s/.venv/bin/ruff
82 | # ruff.options = --fix REVISION_SCRIPT_FILENAME
83 |
84 | # Logging configuration
85 | [loggers]
86 | keys = root,sqlalchemy,alembic
87 |
88 | [handlers]
89 | keys = console
90 |
91 | [formatters]
92 | keys = generic
93 |
94 | [logger_root]
95 | level = WARNING
96 | handlers = console
97 | qualname =
98 |
99 | [logger_sqlalchemy]
100 | level = WARNING
101 | handlers =
102 | qualname = sqlalchemy.engine
103 |
104 | [logger_alembic]
105 | level = INFO
106 | handlers =
107 | qualname = alembic
108 |
109 | [handler_console]
110 | class = StreamHandler
111 | args = (sys.stderr,)
112 | level = NOTSET
113 | formatter = generic
114 |
115 | [formatter_generic]
116 | format = %(levelname)-5.5s [%(name)s] %(message)s
117 | datefmt = %H:%M:%S
118 |
--------------------------------------------------------------------------------
/ydb_sqlalchemy/sqlalchemy/types.py:
--------------------------------------------------------------------------------
1 | import decimal
2 | from typing import Any, Mapping, Type, Union
3 |
4 | from sqlalchemy import __version__ as sa_version
5 |
6 | if sa_version.startswith("2."):
7 | from sqlalchemy import ColumnElement
8 | else:
9 | from sqlalchemy.sql.expression import ColumnElement
10 |
11 | from sqlalchemy import ARRAY, exc, types
12 | from sqlalchemy.sql import type_api
13 |
14 | from .datetime_types import YqlDate, YqlDateTime, YqlTimestamp, YqlDate32, YqlTimestamp64, YqlDateTime64 # noqa: F401
15 | from .json import YqlJSON # noqa: F401
16 |
17 |
class UInt64(types.Integer):
    """YQL Uint64 column type."""

    __visit_name__ = "uint64"
20 |
21 |
class UInt32(types.Integer):
    """YQL Uint32 column type."""

    __visit_name__ = "uint32"
24 |
25 |
class UInt16(types.Integer):
    """YQL Uint16 column type."""

    __visit_name__ = "uint16"
28 |
29 |
class UInt8(types.Integer):
    """YQL Uint8 column type."""

    __visit_name__ = "uint8"
32 |
33 |
class Int64(types.Integer):
    """YQL Int64 column type."""

    __visit_name__ = "int64"
36 |
37 |
class Int32(types.Integer):
    """YQL Int32 column type."""

    __visit_name__ = "int32"
40 |
41 |
class Int16(types.Integer):
    """YQL Int16 column type."""

    # FIX: was "int32" — a copy-paste slip; every sibling class here maps to
    # its own width (cf. UInt16 -> "uint16"), and rendering Int16 as Int32
    # silently widens the column.
    # NOTE(review): requires a visit_int16 hook in the type compiler — the
    # uint16 counterpart implies it exists; verify.
    __visit_name__ = "int16"
44 |
45 |
class Int8(types.Integer):
    """YQL Int8 column type."""

    __visit_name__ = "int8"
48 |
49 |
class Decimal(types.DECIMAL):
    """YQL Decimal type.

    YDB's default decimal is Decimal(22, 9); those values are substituted
    when ``precision``/``scale`` are omitted.
    """

    __visit_name__ = "DECIMAL"

    def __init__(self, precision=None, scale=None, asdecimal=True):
        # YDB supports Decimal(22,9) by default
        if precision is None:
            precision = 22
        if scale is None:
            scale = 9
        super().__init__(precision=precision, scale=scale, asdecimal=asdecimal)

    def bind_processor(self, dialect):
        """Return a processor coercing bound parameters to decimal.Decimal."""

        def process(value):
            if value is None:
                return None
            if isinstance(value, decimal.Decimal):
                return value
            if isinstance(value, str):
                return decimal.Decimal(value)
            # str() round-trip avoids binary-float artifacts for floats and
            # handles any other numeric-like input uniformly (the previous
            # separate float branch produced the same result).
            return decimal.Decimal(str(value))

        return process

    def result_processor(self, dialect, coltype):
        """Return a processor converting fetched values per ``asdecimal``."""

        def process(value):
            if value is None:
                return None

            # asdecimal=False callers want plain floats.
            if not self.asdecimal:
                return float(value)

            # For asdecimal=True (default), return as Decimal.
            if isinstance(value, decimal.Decimal):
                return value
            return decimal.Decimal(str(value))

        return process

    def literal_processor(self, dialect):
        """Return a processor rendering an inline YQL Decimal("v", p, s) literal."""

        def process(value):
            # Coerce anything non-Decimal (float, str, int, ...) via str().
            if not isinstance(value, decimal.Decimal):
                value = decimal.Decimal(str(value))

            # Fall back to YDB's default precision/scale if unset.
            precision = self.precision if self.precision is not None else 22
            scale = self.scale if self.scale is not None else 9

            return f'Decimal("{value}", {precision}, {scale})'

        return process
108 |
109 |
class ListType(ARRAY):
    """YQL List type; reuses SQLAlchemy's ARRAY for item-type bookkeeping."""

    __visit_name__ = "list_type"
112 |
113 |
class HashableDict(dict):
    """A dict that can be used as a set member or cache key.

    Values must themselves be hashable (here they are type objects, which are).
    """

    def __hash__(self):
        # FIX: hash(tuple(self.items())) depended on insertion order, so two
        # *equal* dicts could hash differently — violating the hash/eq
        # contract. frozenset is order-independent, matching dict equality.
        return hash(frozenset(self.items()))
117 |
118 |
class StructType(types.TypeEngine[Mapping[str, Any]]):
    """YQL Struct type: a fixed mapping of field name to column type."""

    __visit_name__ = "struct_type"

    def __init__(self, fields_types: Mapping[str, Union[Type[types.TypeEngine], Type[types.TypeDecorator]]]):
        # Sort fields by name so equal structs get an identical (hashable)
        # representation regardless of the order the caller supplied them in.
        self.fields_types = HashableDict(dict(sorted(fields_types.items())))

    @property
    def python_type(self):
        # Struct values surface to Python as plain dicts.
        return dict

    def compare_values(self, x, y):
        # Plain equality is sufficient for dict-valued struct data.
        return x == y
131 |
132 |
class Lambda(ColumnElement):
    """SQL expression element wrapping a Python callable that builds a YQL lambda.

    Rendering is handled by the compiler's ``visit_lambda`` hook (per
    ``__visit_name__``), which calls ``func`` with placeholder arguments.
    """

    __visit_name__ = "lambda"

    def __init__(self, func):
        if not callable(func):
            raise exc.ArgumentError("func must be callable")

        # The lambda's result type is unknown at construction time.
        self.type = type_api.NULLTYPE
        self.func = func
142 |
--------------------------------------------------------------------------------
/docs/connection.rst:
--------------------------------------------------------------------------------
1 | Connection Configuration
2 | ========================
3 |
4 | This guide covers various ways to configure connections to YDB using SQLAlchemy.
5 |
6 | Connection URL Format
7 | ---------------------
8 |
9 | YDB SQLAlchemy uses the following URL format:
10 |
11 | .. code-block:: text
12 |
13 | yql+ydb://host:port/database
14 |
15 | Basic Examples:
16 |
17 | .. code-block:: python
18 |
19 | # Synchronous connection
20 | engine = sa.create_engine("yql+ydb://localhost:2136/local")
21 |
22 | # Asynchronous connection
23 | from sqlalchemy.ext.asyncio import create_async_engine
24 | async_engine = create_async_engine("yql+ydb_async://localhost:2136/local")
25 |
26 | # Remote YDB instance
27 | engine = sa.create_engine("yql+ydb://ydb.example.com:2135/prod")
28 | async_engine = create_async_engine("yql+ydb_async://ydb.example.com:2135/prod")
29 |
30 | # With database path
31 | engine = sa.create_engine("yql+ydb://localhost:2136/local/my_database")
32 | async_engine = create_async_engine("yql+ydb_async://localhost:2136/local/my_database")
33 |
34 | Authentication Methods
35 | ----------------------
36 |
37 | YDB SQLAlchemy supports multiple authentication methods through the ``connect_args`` parameter.
38 |
39 | Anonymous Access
40 | ~~~~~~~~~~~~~~~~
41 |
42 | For local development or testing:
43 |
44 | .. code-block:: python
45 |
46 | import sqlalchemy as sa
47 |
48 | engine = sa.create_engine("yql+ydb://localhost:2136/local")
49 |
50 | Static Credentials
51 | ~~~~~~~~~~~~~~~~~~
52 |
53 | Use username and password authentication:
54 |
55 | .. code-block:: python
56 |
57 | engine = sa.create_engine(
58 | "yql+ydb://localhost:2136/local",
59 | connect_args={
60 | "credentials": {
61 | "username": "your_username",
62 | "password": "your_password"
63 | }
64 | }
65 | )
66 |
67 | Token Authentication
68 | ~~~~~~~~~~~~~~~~~~~~
69 |
70 | Use access token for authentication:
71 |
72 | .. code-block:: python
73 |
74 | engine = sa.create_engine(
75 | "yql+ydb://localhost:2136/local",
76 | connect_args={
77 | "credentials": {
78 | "token": "your_access_token"
79 | }
80 | }
81 | )
82 |
83 | Service Account Authentication
84 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
85 |
86 | Use service account JSON key:
87 |
88 | .. code-block:: python
89 |
90 | engine = sa.create_engine(
91 | "yql+ydb://localhost:2136/local",
92 | connect_args={
93 | "credentials": {
94 | "service_account_json": {
95 | "id": "your_key_id",
96 | "service_account_id": "your_service_account_id",
97 | "created_at": "2023-01-01T00:00:00Z",
98 | "key_algorithm": "RSA_2048",
99 | "public_key": "-----BEGIN PUBLIC KEY-----\\n...\\n-----END PUBLIC KEY-----",
100 | "private_key": "-----BEGIN PRIVATE KEY-----\\n...\\n-----END PRIVATE KEY-----"
101 | }
102 | }
103 | }
104 | )
105 |
106 | Or load from file:
107 |
108 | .. code-block:: python
109 |
110 | import json
111 |
112 | with open('service_account_key.json', 'r') as f:
113 | service_account_json = json.load(f)
114 |
115 | engine = sa.create_engine(
116 | "yql+ydb://localhost:2136/local",
117 | connect_args={
118 | "credentials": {
119 | "service_account_json": service_account_json
120 | }
121 | }
122 | )
123 |
124 | YDB SDK Credentials
125 | ~~~~~~~~~~~~~~~~~~~
126 |
127 | Use any credentials from the YDB Python SDK:
128 |
129 | .. code-block:: python
130 |
131 | import ydb.iam
132 |
133 | engine = sa.create_engine(
134 | "yql+ydb://localhost:2136/local",
135 | connect_args={
136 | "credentials": ydb.iam.MetadataUrlCredentials()
137 | }
138 | )
139 |
140 | # OAuth token credentials
141 | engine = sa.create_engine(
142 | "yql+ydb://localhost:2136/local",
143 | connect_args={
144 | "credentials": ydb.iam.OAuthCredentials("your_oauth_token")
145 | }
146 | )
147 |
148 | # Static credentials
149 | engine = sa.create_engine(
150 | "yql+ydb://localhost:2136/local",
151 | connect_args={
152 | "credentials": ydb.iam.StaticCredentials("username", "password")
153 | }
154 | )
155 |
156 | TLS Configuration
157 | ---------------------
158 |
159 | For secure connections to YDB:
160 |
161 | .. code-block:: python
162 |
163 | engine = sa.create_engine(
164 | "yql+ydb://ydb.example.com:2135/prod",
165 | connect_args={
166 | "credentials": {"token": "your_token"},
167 | "protocol": "grpc",
168 | "root_certificates_path": "/path/to/ca-certificates.crt",
169 | # "root_certificates": crt_string,
170 | }
171 | )
172 |
--------------------------------------------------------------------------------
/.github/scripts/increment_version.py:
--------------------------------------------------------------------------------
1 | #!/bin/env python
2 | import argparse
3 | from dataclasses import dataclass
4 |
5 | from packaging.version import Version
6 |
7 | SETUP_PY_PATH = "setup.py"
8 | DEFAULT_CHANGELOG_PATH = "CHANGELOG.md"
9 | DEFAULT_YDB_VERSION_FILE = "ydb_sqlalchemy/_version.py"
10 | MARKER = "# AUTOVERSION"
11 |
12 |
@dataclass(init=False)
class VersionLine:
    """Parsed AUTOVERSION line from setup.py.

    Holds the decomposed version (major/minor/micro plus an optional beta
    counter) together with the original line text so the caller can replace
    it in place.
    """

    old_line: str  # the original marked line, used for in-place replacement
    major: int
    minor: int
    # FIX: this field was declared as ``patch`` while the code stores
    # ``micro`` — the generated dataclass __repr__ crashed with
    # AttributeError on the never-assigned ``patch``.
    micro: int
    pre: int  # beta number; 0 means a stable (non-beta) release

    def __init__(self, old_line: str, version_str: str):
        """Parse *version_str* (PEP 440) and remember *old_line*."""
        self.old_line = old_line

        version = Version(version_str)
        self.major = version.major
        self.minor = version.minor
        self.micro = version.micro

        # version.pre is e.g. ("b", 2) for "1.2.3b2"; keep only the number.
        if version.pre is None:
            self.pre = 0
        else:
            self.pre = version.pre[1]

    def increment(self, part_name: str, with_beta: bool):
        """Bump the requested part; 'patch' is accepted as an alias of 'micro'."""
        if part_name == 'minor':
            self.increment_minor(with_beta)
        elif part_name == 'patch' or part_name == 'micro':
            self.increment_micro(with_beta)
        else:
            raise Exception("unexpected increment type: '%s'" % part_name)

    def increment_minor(self, with_beta: bool):
        """Bump the minor version, or advance/open its beta series."""
        if with_beta:
            # Open a new x.(y+1).0 beta series unless one is already running.
            if self.pre == 0 or self.micro != 0:
                self.increment_minor(False)
            self.pre += 1
            return

        if self.micro == 0 and self.pre > 0:
            # Promote the current x.y.0 beta to the stable release.
            self.pre = 0
            return

        self.minor += 1
        self.micro = 0
        self.pre = 0

    def increment_micro(self, with_beta: bool):
        """Bump the micro (patch) version, or advance/open its beta series."""
        if with_beta:
            if self.pre == 0:
                self.increment_micro(False)
            self.pre += 1
            return

        if self.pre > 0:
            # Promote the current beta to the stable release.
            self.pre = 0
            return

        self.micro += 1

    def __str__(self):
        """Render as 'X.Y.Z' or 'X.Y.ZbN'."""
        if self.pre > 0:
            pre = "b%s" % self.pre
        else:
            pre = ""

        return "%s.%s.%s%s" % (self.major, self.minor, self.micro, pre)

    def version_line_with_mark(self):
        """Rebuild the setup.py version line, keeping the AUTOVERSION marker."""
        return 'version="%s", %s' % (str(self), MARKER)
80 |
81 |
def extract_version(setup_py_content: str):
    """Locate the AUTOVERSION-marked line in setup.py and parse it.

    Raises if no line carries the marker.
    """
    marked = [ln for ln in setup_py_content.splitlines() if MARKER in ln]
    if not marked:
        raise Exception("Not found version line")

    version_line = marked[0].strip()

    # The version string is the first double-quoted token on the line.
    version_part = version_line.split('"')[1]

    return VersionLine(old_line=version_line, version_str=version_part)
98 |
99 |
def increment_version_at_setup_py(setup_py_path: str, inc_type: str, with_beta: bool) -> str:
    """Bump the AUTOVERSION line inside setup.py in place.

    Returns the new version string.
    """
    with open(setup_py_path, "rt") as src:
        content = src.read()

    parsed = extract_version(content)
    parsed.increment(inc_type, with_beta)
    updated = content.replace(parsed.old_line, parsed.version_line_with_mark())

    with open(setup_py_path, "w") as dst:
        dst.write(updated)

    return str(parsed)
112 |
113 |
def add_changelog_version(changelog_path, version: str):
    """Prepend a '## <version> ##' heading to the changelog.

    No-op when the file already starts with a heading (i.e. the unreleased
    notes were already claimed by a version).
    """
    with open(changelog_path, "rt") as f:
        body = f.read().strip()

    if body.startswith("##"):
        return

    with open(changelog_path, "w") as f:
        f.write("## %s ##\n%s\n" % (version, body))
127 |
128 |
def set_version_in_ydb_version_file(file_path: str, version: str):
    """Overwrite the package version module with a single VERSION assignment."""
    line = 'VERSION = "%s"\n' % version
    with open(file_path, "w") as f:
        f.write(line)
132 |
def main():
    """CLI entry point used by the release workflow.

    Increments the AUTOVERSION line in setup.py, prepends the new version
    heading to the changelog, rewrites ydb_sqlalchemy/_version.py, and prints
    the resulting version string to stdout for the calling CI step.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--inc-type",
        default="minor",
        help="increment version type: patch or minor",
        choices=["minor", "patch"],
    )
    parser.add_argument(
        "--beta",
        choices=["true", "false"],
        help="is beta version"
    )
    parser.add_argument("--changelog-path", default=DEFAULT_CHANGELOG_PATH, help="path to changelog", type=str)
    parser.add_argument("--setup-py-path", default=SETUP_PY_PATH)

    args = parser.parse_args()

    # --beta arrives as the string "true"/"false" from the workflow input.
    is_beta = args.beta == "true"

    new_version = increment_version_at_setup_py(args.setup_py_path, args.inc_type, is_beta)
    add_changelog_version(args.changelog_path, new_version)
    set_version_in_ydb_version_file(DEFAULT_YDB_VERSION_FILE, new_version)
    print(new_version)
157 |
158 |
159 | if __name__ == '__main__':
160 | main()
161 |
162 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # YDB Dialect for SQLAlchemy
2 | ---
3 | [](https://github.com/ydb-platform/ydb-sqlalchemy/blob/main/LICENSE)
4 | [](https://badge.fury.io/py/ydb-sqlalchemy)
5 | [](https://ydb-platform.github.io/ydb-sqlalchemy/api/index.html)
6 | [](https://github.com/ydb-platform/ydb-sqlalchemy/actions/workflows/tests.yml)
7 | [](https://github.com/ydb-platform/ydb-sqlalchemy/actions/workflows/style.yml)
8 |
9 | This repository contains YQL dialect for SqlAlchemy 2.0.
10 |
11 | ---
12 |
13 | **Documentation**: https://ydb-platform.github.io/ydb-sqlalchemy
14 |
15 | ---
16 |
17 | **Note**: Dialect also works with SqlAlchemy 1.4, but it is not fully tested.
18 |
19 |
20 | ## Installation
21 |
22 | ### Via PyPI
23 | To install ydb-sqlalchemy from PyPI:
24 |
25 | ```bash
26 | $ pip install ydb-sqlalchemy
27 | ```
28 |
29 | ### Installation from source code
30 | To work with current ydb-sqlalchemy version clone this repo and run from source root:
31 |
32 | ```bash
33 | $ pip install -U .
34 | ```
35 |
36 | ## Getting started
37 |
38 | Connect to local YDB using SqlAlchemy:
39 |
40 | ```python3
41 | import sqlalchemy as sa
42 |
43 |
44 | engine = sa.create_engine("yql+ydb://localhost:2136/local")
45 |
46 | with engine.connect() as conn:
47 | rs = conn.execute(sa.text("SELECT 1 AS value"))
48 | print(rs.fetchone())
49 |
50 | ```
51 |
52 | ## Authentication
53 |
54 | To specify credentials, you should pass `credentials` object to `connect_args` argument of `create_engine` method.
55 |
56 | ### Static Credentials
57 |
58 | To use static credentials you should specify `username` and `password` as follows:
59 |
60 | ```python3
61 | engine = sa.create_engine(
62 | "yql+ydb://localhost:2136/local",
63 | connect_args = {
64 | "credentials": {
65 | "username": "...",
66 | "password": "..."
67 | }
68 | }
69 | )
70 | ```
71 |
72 | ### Token Credentials
73 |
74 | To use access token credentials you should specify `token` as follows:
75 |
76 | ```python3
77 | engine = sa.create_engine(
78 | "yql+ydb://localhost:2136/local",
79 | connect_args = {
80 | "credentials": {
81 | "token": "..."
82 | }
83 | }
84 | )
85 | ```
86 |
87 | ### Service Account Credentials
88 |
89 | To use service account credentials you should specify `service_account_json` as follows:
90 |
91 | ```python3
92 | engine = sa.create_engine(
93 | "yql+ydb://localhost:2136/local",
94 | connect_args = {
95 | "credentials": {
96 | "service_account_json": {
97 | "id": "...",
98 | "service_account_id": "...",
99 | "created_at": "...",
100 | "key_algorithm": "...",
101 | "public_key": "...",
102 | "private_key": "..."
103 | }
104 | }
105 | }
106 | )
107 | ```
108 |
109 | ### Credentials from YDB SDK
110 |
111 | To use any credentials that comes with `ydb` package, just pass credentials object as follows:
112 |
113 | ```python3
114 | import ydb.iam
115 |
116 | engine = sa.create_engine(
117 | "yql+ydb://localhost:2136/local",
118 | connect_args = {
119 | "credentials": ydb.iam.MetadataUrlCredentials()
120 | }
121 | )
122 |
123 | ```
124 |
125 |
126 | ## Migrations
127 |
128 | To setup `alembic` to work with `YDB` please check [this example](https://github.com/ydb-platform/ydb-sqlalchemy/tree/main/examples/alembic).
129 |
130 | ## Development
131 |
132 | ### Run Tests:
133 |
134 | Run the command from the root directory of the repository to start YDB in a local docker container.
135 | ```bash
136 | $ docker-compose up
137 | ```
138 |
139 | To run all tests execute the command from the root directory of the repository:
140 | ```bash
141 | $ tox -e test-all
142 | ```
143 |
144 | Run specific test:
145 | ```bash
146 | $ tox -e test -- test/test_orm.py
147 | ```
148 |
149 | Check code style:
150 | ```bash
151 | $ tox -e style
152 | ```
153 |
154 | Reformat code:
155 | ```bash
156 | $ tox -e isort
157 | $ tox -e black-format
158 | ```
159 |
160 | Run example (needs running local YDB):
161 | ```bash
162 | $ python -m pip install virtualenv
163 | $ virtualenv venv
164 | $ source venv/bin/activate
165 | $ pip install -r requirements.txt
166 | $ python examples/basic_example/example.py
167 | ```
168 |
169 | ## Additional Notes
170 |
171 | ### Pandas
172 | It is possible to use YDB SA engine with `pandas` functions [to_sql()](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_sql.html) and [read_sql](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.read_sql.html). However, there are some limitations:
173 |
174 | * `to_sql` method can not be used with column tables, since it is impossible to specify `NOT NULL` columns with current `to_sql` arguments. YDB requires column tables to have `NOT NULL` attribute on `PK` columns.
175 |
176 | * `to_sql` is not fully optimized to load huge datasets. It is recommended to use `method="multi"` and avoid setting a very large `chunksize`.
177 |
178 | * `read_sql` is not fully optimized to load huge datasets and could lead to significant memory consumptions.
179 |
--------------------------------------------------------------------------------
/docs/quickstart.rst:
--------------------------------------------------------------------------------
1 | Quick Start
2 | ===========
3 |
4 | This guide will help you get started with YDB SQLAlchemy quickly. We'll cover basic usage patterns for both SQLAlchemy Core and ORM.
5 |
6 | Prerequisites
7 | -------------
8 |
9 | Before starting, make sure you have:
10 |
11 | 1. YDB SQLAlchemy installed (see :doc:`installation`)
12 | 2. A running YDB instance (local or remote)
13 | 3. Basic familiarity with SQLAlchemy
14 |
15 | Basic Connection
16 | ----------------
17 |
18 | The simplest way to connect to YDB:
19 |
20 | .. code-block:: python
21 |
22 | import sqlalchemy as sa
23 |
24 | # Create engine for local YDB
25 | engine = sa.create_engine("yql+ydb://localhost:2136/local")
26 |
27 | # Test connection
28 | with engine.connect() as conn:
29 | result = conn.execute(sa.text("SELECT 1 AS value"))
30 | print(result.fetchone()) # (1,)
31 |
32 | SQLAlchemy Core Example
33 | -----------------------
34 |
35 | Using SQLAlchemy Core for direct SQL operations:
36 |
37 | .. code-block:: python
38 |
39 | import sqlalchemy as sa
40 | from sqlalchemy import MetaData, Table, Column, Integer, String
41 |
42 | # Create engine
43 | engine = sa.create_engine("yql+ydb://localhost:2136/local")
44 |
45 | # Define table structure
46 | metadata = MetaData()
47 | users = Table(
48 | 'users',
49 | metadata,
50 | Column('id', Integer, primary_key=True),
51 | Column('name', String(50)),
52 | Column('email', String(100))
53 | )
54 |
55 | # Create table
56 | metadata.create_all(engine)
57 |
58 | # Insert data
59 | with engine.connect() as conn:
60 | # Single insert
61 | conn.execute(
62 | users.insert().values(id=1, name='John Doe', email='john@example.com')
63 | )
64 |
65 | # Multiple inserts
66 | conn.execute(
67 | users.insert(),
68 | [
69 | {'id': 2, 'name': 'Jane Smith', 'email': 'jane@example.com'},
70 | {'id': 3, 'name': 'Bob Johnson', 'email': 'bob@example.com'}
71 | ]
72 | )
73 |
74 | # Commit changes
75 | conn.commit()
76 |
77 | # Query data
78 | with engine.connect() as conn:
79 | # Select all
80 | result = conn.execute(sa.select(users))
81 | for row in result:
82 | print(f"ID: {row.id}, Name: {row.name}, Email: {row.email}")
83 |
84 | # Select with conditions
85 | result = conn.execute(
86 | sa.select(users).where(users.c.name.like('John%'))
87 | )
88 | print(result.fetchall())
89 |
90 | SQLAlchemy ORM Example
91 | ----------------------
92 |
93 | Using SQLAlchemy ORM for object-relational mapping:
94 |
95 | .. code-block:: python
96 |
97 | import sqlalchemy as sa
98 | from sqlalchemy import Column, Integer, String
99 | from sqlalchemy.orm import declarative_base
100 | from sqlalchemy.orm import sessionmaker
101 |
102 | # Create engine
103 | engine = sa.create_engine("yql+ydb://localhost:2136/local")
104 |
105 | # Define base class
106 | Base = declarative_base()
107 |
108 | # Define model
109 | class User(Base):
110 | __tablename__ = 'users_orm'
111 |
112 | id = Column(Integer, primary_key=True)
113 | name = Column(String(50))
114 | email = Column(String(100))
115 |
116 | def __repr__(self):
            return f"<User(id={self.id}, name={self.name})>"
118 |
119 | # Create tables
120 | Base.metadata.create_all(engine)
121 |
122 | # Create session
123 | Session = sessionmaker(bind=engine)
124 | session = Session()
125 |
126 | # Create and add users
127 | user1 = User(id=1, name='Alice Brown', email='alice@example.com')
128 | user2 = User(id=2, name='Charlie Davis', email='charlie@example.com')
129 |
130 | session.add_all([user1, user2])
131 | session.commit()
132 |
133 | # Query users
134 | users = session.query(User).all()
135 | for user in users:
136 | print(user)
137 |
138 | # Query with filters
139 | alice = session.query(User).filter(User.name == 'Alice Brown').first()
140 | print(f"Found user: {alice}")
141 |
142 | # Update user
143 | alice.email = 'alice.brown@example.com'
144 | session.commit()
145 |
146 | # Delete user
147 | session.delete(user2)
148 | session.commit()
149 |
150 | session.close()
151 |
152 | Working with YDB-Specific Features
153 | -----------------------------------
154 |
155 | YDB has some unique features that you can leverage:
156 |
157 | Upsert Operations
158 | ~~~~~~~~~~~~~~~~~
159 |
160 | YDB supports efficient upsert operations:
161 |
162 | .. code-block:: python
163 |
164 | from ydb_sqlalchemy.sqlalchemy import upsert
165 |
166 | # Using upsert instead of insert
167 | with engine.connect() as conn:
168 | stmt = upsert(users).values(
169 | id=1,
170 | name='John Updated',
171 | email='john.updated@example.com'
172 | )
173 | conn.execute(stmt)
174 | conn.commit()
175 |
176 | YDB-Specific Types
177 | ~~~~~~~~~~~~~~~~~~
178 |
179 | Use YDB-specific data types for better performance:
180 |
181 | .. code-block:: python
182 |
183 | from ydb_sqlalchemy.sqlalchemy.types import UInt64, YqlJSON
184 |
185 | # Table with YDB-specific types
186 | ydb_table = Table(
187 | 'ydb_example',
188 | metadata,
189 | Column('id', UInt64, primary_key=True),
190 | Column('data', YqlJSON),
191 | Column('created_at', sa.DateTime)
192 | )
193 |
194 | Next Steps
195 | ----------
196 |
197 | Now that you have the basics working:
198 |
199 | 1. Learn about :doc:`connection` configuration and authentication
200 | 2. Explore :doc:`types` for YDB-specific data types
201 | 3. Set up :doc:`migrations` with Alembic
202 | 4. Check out the examples in the repository
203 |
204 | Common Patterns
205 | ---------------
206 |
207 | Here are some common patterns you'll use frequently:
208 |
209 | .. code-block:: python
210 |
    # Counting records
    count = conn.execute(sa.select(sa.func.count(users.c.id))).scalar()
213 |
214 | # Aggregations
215 | result = conn.execute(
216 | sa.select(sa.func.max(users.c.id), sa.func.count())
217 | .select_from(users)
218 | )
219 |
220 | # Joins (when you have related tables)
221 | # result = conn.execute(
222 | # sa.select(users, orders)
223 | # .select_from(users.join(orders, users.c.id == orders.c.user_id))
224 | # )
225 |
--------------------------------------------------------------------------------
/examples/basic_example/example.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import logging
3 |
4 | import sqlalchemy as sa
5 | from fill_tables import fill_all_tables, to_days
6 | from models import Base, Episodes, Series
7 | from sqlalchemy import TIMESTAMP, Column, Float, Integer, String, Table, exc, orm, sql
8 |
9 |
def describe_table(engine, name):
    """Print the column names and types of table *name* via SQLAlchemy inspection."""
    inspector = sa.inspect(engine)
    print(f"describe table {name}:")
    for column in inspector.get_columns(name):
        print(f"\t{column['name']}: {column['type']}")
15 |
16 |
def simple_select(conn):
    """Fetch and print the single Series row with series_id == 1."""
    query = sa.select(Series).where(Series.series_id == 1)
    result = conn.execute(query)
    print(result.one())
21 |
22 |
def simple_insert(conn):
    """Insert a placeholder episode row (series 3, season 6, episode 1)."""
    insert_stmt = Episodes.__table__.insert().values(
        series_id=3,
        season_id=6,
        episode_id=1,
        title="TBD",
    )
    conn.execute(insert_stmt)
26 |
27 |
def test_types(conn):
    """Exercise basic scalar column types and a GROUP BY aggregate on YDB."""
    table = Table(
        "test_types",
        Base.metadata,
        Column("id", Integer, primary_key=True),
        Column("str", String),
        Column("num", Float),
        Column("dt", TIMESTAMP),
    )
    # Drop and recreate so the example is repeatable across runs.
    table.drop(bind=conn.engine, checkfirst=True)
    table.create(bind=conn.engine, checkfirst=True)

    insert_stmt = table.insert().values(
        id=1,
        str="Hello World!",
        num=3.1415,
        dt=datetime.datetime.now(),
    )
    conn.execute(insert_stmt)

    # Read the data back aggregated with GROUP BY.
    group_query = sa.select(table.c.str, sa.func.max(table.c.num)).group_by(table.c.str)
    for row in conn.execute(group_query):
        print(row)
53 |
54 |
def run_example_orm(engine):
    """Demonstrate ORM usage against YDB.

    Recreates the whole schema, loads the reference data, then walks
    through typical session operations: bulk add, filtered queries,
    count and max aggregates, and a single-row insert.
    """
    # Start from a clean schema on every run.
    Base.metadata.drop_all(bind=engine)
    Base.metadata.create_all(bind=engine)

    session = orm.sessionmaker(bind=engine)()

    # Tables were just recreated, so this query is expected to print nothing.
    rs = session.query(Episodes).all()
    for e in rs:
        print(f"{e.episode_id}: {e.title}")

    fill_all_tables(session.connection())

    try:
        session.add_all(
            [
                Episodes(
                    series_id=2,
                    season_id=1,
                    episode_id=1,
                    title="Minimum Viable Product",
                    air_date=to_days("2014-04-06"),
                ),
                Episodes(
                    series_id=2,
                    season_id=1,
                    episode_id=2,
                    title="The Cap Table",
                    air_date=to_days("2014-04-13"),
                ),
                Episodes(
                    series_id=2,
                    season_id=1,
                    episode_id=3,
                    title="Articles of Incorporation",
                    air_date=to_days("2014-04-20"),
                ),
                Episodes(
                    series_id=2,
                    season_id=1,
                    episode_id=4,
                    title="Fiduciary Duties",
                    air_date=to_days("2014-04-27"),
                ),
                Episodes(
                    series_id=2,
                    season_id=1,
                    episode_id=5,
                    title="Signaling Risk",
                    air_date=to_days("2014-05-04"),
                ),
            ]
        )
        session.commit()
    except exc.DatabaseError:
        # The rows already exist (e.g. from a previous run) — roll back
        # the failed flush and continue with the existing data.
        print("Episodes already added!")
        session.rollback()

    rs = session.query(Episodes).all()
    for e in rs:
        print(f"{e.episode_id}: {e.title}")

    # Filter on a literal containing '?' characters — expected to match nothing.
    rs = session.query(Episodes).filter(Episodes.title == "abc??").all()
    for e in rs:
        print(e.title)

    print("Episodes count:", session.query(Episodes).count())

    max_episode = session.query(sql.expression.func.max(Episodes.episode_id)).scalar()
    print("Maximum episodes id:", max_episode)

    # Add one more episode; it is not committed here, but the count query
    # below autoflushes the pending row so it is included in the result.
    session.add(
        Episodes(
            series_id=2,
            season_id=1,
            episode_id=max_episode + 1,
            title="Signaling Risk",
            air_date=to_days("2014-05-04"),
        )
    )

    print("Episodes count:", session.query(Episodes).count())
136 |
137 |
def run_example_core(engine):
    """Demonstrate SQLAlchemy Core usage against YDB.

    Runs raw SQL, table reflection, select/insert/delete statements,
    a join, and aggregate queries over the example schema.
    """
    with engine.connect() as conn:
        # Raw SQL through sa.text().
        rs = conn.execute(sa.text("SELECT 1 AS value"))
        print(rs.fetchone())

        fill_all_tables(conn)

        for t in "series seasons episodes".split():
            describe_table(engine, t)

        # Reflect the 'episodes' table from the live database schema.
        tb = sa.Table("episodes", sa.MetaData(), autoload_with=engine)
        stm = (
            sa.select(tb.c.title)
            .where(sa.and_(tb.c.series_id == 1, tb.c.season_id == 3))
            .where(tb.c.title.like("%"))
            .order_by(sa.asc(tb.c.title))
            # TODO: limit isn't working now
            # .limit(3)
        )
        rs = conn.execute(stm)
        print(rs.fetchall())

        simple_select(conn)

        simple_insert(conn)

        # Simple join between episodes and series on series_id.
        stm = sa.select(Episodes.__table__.join(Series, Episodes.series_id == Series.series_id)).where(
            sa.and_(Series.series_id == 1, Episodes.season_id == 1)
        )
        rs = conn.execute(stm)
        for row in rs:
            print(f"{row.series_title}({row.episode_id}): {row.title}")

        rs = conn.execute(sa.select(Episodes).where(Episodes.series_id == 3))
        print(rs.fetchall())

        # Count of all episodes.
        cnt = conn.execute(sa.func.count(Episodes.episode_id)).scalar()
        print("Episodes cnt:", cnt)

        # Delete the placeholder row added by simple_insert(), then recount.
        conn.execute(sa.delete(Episodes).where(Episodes.title == "TBD"))
        cnt = conn.execute(sa.func.count(Episodes.episode_id)).scalar()
        print("Episodes cnt:", cnt)

        test_types(conn)
186 |
187 |
def main():
    """Entry point: connect to a local YDB instance and run the Core example."""
    engine = sa.create_engine("yql+ydb://localhost:2136/local")

    logging.basicConfig(level=logging.INFO)
    # Fix: the SQLAlchemy engine logger is named "sqlalchemy.engine"; the
    # previous "_sqlalchemy.engine" name matched no logger, so executed SQL
    # was never echoed to the log.
    logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)

    run_example_core(engine)
    # run_example_orm(engine)
196 |
197 |
198 | if __name__ == "__main__":
199 | main()
200 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Configuration file for the Sphinx documentation builder.
4 | #
5 | # This file does only contain a selection of the most common options. For a
6 | # full list see the documentation:
7 | # http://www.sphinx-doc.org/en/master/config
8 |
9 | # -- Path setup --------------------------------------------------------------
10 |
11 | # If extensions (or modules to document with autodoc) are in another directory,
12 | # add these directories to sys.path here. If the directory is relative to the
13 | # documentation root, use os.path.abspath to make it absolute, like shown here.
14 | #
15 | import os
16 | import sys
17 | sys.path.insert(0, os.path.abspath('..'))
18 |
19 |
# -- Project information -----------------------------------------------------

project = 'YDB SQLAlchemy'
copyright = '2025, Yandex'
author = 'Yandex'

# Short X.Y version string.
version = '0.1'
# Full release string, including alpha/beta/rc tags.
release = '0.1.9'
30 |
31 |
# -- General configuration ---------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Sphinx extension modules: built-in ('sphinx.ext.*') plus the third-party
# copy-button extension.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.viewcode',
    'sphinx.ext.todo',
    'sphinx.ext.napoleon',
    'sphinx.ext.coverage',
    'sphinx.ext.intersphinx',
    'sphinx.ext.githubpages',
    'sphinx_copybutton',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# The language for content autogenerated by Sphinx.
# Fix: Sphinx 5.0+ rejects ``language = None`` as an invalid configuration
# value; use the explicit default 'en' instead.
language = 'en'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['.build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
78 |
79 |
# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'

# NOTE(review): these keys (fixed_sidebar, page_width, show_related,
# show_powered_by) look like alabaster theme options, not sphinx_rtd_theme
# options — confirm they have any effect with the RTD theme.
html_theme_options = {
    'fixed_sidebar': True,
    'page_width': '1140px',
    'show_related': True,
    'show_powered_by': False
}

# Project logo used both in the sidebar and as the favicon.
html_logo = '_static/logo.svg'
html_favicon = '_static/logo.svg'

# Hide the "View page source" links.
html_show_sourcelink = False

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}


# -- Options for HTMLHelp output ---------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'ydb-sqlalchemy-doc'


# -- Options for LaTeX output ------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
146 |
# Grouping the document tree into LaTeX files: one tuple per output file,
# in the form (start doc, target name, title, author, document class).
latex_documents = [
    (
        master_doc,
        'ydb-sqlalchemy.tex',
        'YDB SQLAlchemy Documentation',
        'Yandex',
        'manual',
    ),
]


# -- Options for manual page output ------------------------------------------

# One entry per manual page: (start doc, name, description, authors, section).
man_pages = [
    (
        master_doc,
        'ydb-sqlalchemy',
        'YDB SQLAlchemy Documentation',
        [author],
        1,
    ),
]


# -- Options for Texinfo output ----------------------------------------------

# One tuple per Texinfo file: (start doc, target name, title, author,
# dir menu entry, description, category).
texinfo_documents = [
    (
        master_doc,
        'ydb-sqlalchemy',
        'YDB SQLAlchemy Documentation',
        author,
        'ydb-sqlalchemy',
        'SQLAlchemy dialect for YDB (Yandex Database).',
        'Miscellaneous',
    ),
]
176 |
177 |
# -- Options for Epub output -------------------------------------------------

# Bibliographic Dublin Core info: reuse the project name as the epub title.
epub_title = project

# The unique identifier of the text (ISBN or project homepage), and a unique
# identification string, may be set if needed:
#
# epub_identifier = ''
# epub_uid = ''

# Files that should not be packed into the epub archive.
epub_exclude_files = ['search.html']


# -- Extension configuration -------------------------------------------------

# autodoc: pull docstrings from both the class and its __init__, show type
# hints in both the signature and the description, and list undocumented
# members in source order.
autoclass_content = "both"
autodoc_typehints = "both"
autodoc_default_options = {
    'undoc-members': True,
    'member-order': 'bysource',
}

# -- Intersphinx configuration -----------------------------------------------

# Cross-reference targets for the Python, SQLAlchemy and Alembic manuals.
intersphinx_mapping = {
    'python': ('https://docs.python.org/3', None),
    'sqlalchemy': ('https://docs.sqlalchemy.org/en/20/', None),
    'alembic': ('https://alembic.sqlalchemy.org/en/latest/', None),
}

# -- Copy button configuration --------------------------------------------

# Strip interactive prompts (>>>, ..., $, IPython In [n]:) when the reader
# copies a code sample.
copybutton_prompt_text = r">>> |\.\.\. |\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: "
copybutton_prompt_is_regexp = True
217 |
--------------------------------------------------------------------------------
/docs/types.rst:
--------------------------------------------------------------------------------
1 | Data Types
2 | ==========
3 |
4 | YDB SQLAlchemy provides comprehensive support for YDB data types through custom SQLAlchemy types. This guide covers the available types and their usage.
5 |
6 | Overview
7 | --------
8 |
9 | YDB has a rich type system that includes primitive types, optional types, containers, and special types. The YDB SQLAlchemy dialect maps these types to appropriate SQLAlchemy types and provides YDB-specific types for optimal performance.
For more information about YDB data types, see the `YDB Type System Documentation <https://ydb.tech/docs/en/yql/reference/types/>`_.
11 |
12 | Type Mapping Summary
13 | --------------------
14 |
15 | The following table shows the complete mapping between YDB native types, YDB SQLAlchemy types, standard SQLAlchemy types, and Python types:
16 |
17 | .. list-table:: YDB Type System Reference
18 | :header-rows: 1
19 | :widths: 15 20 20 15 30
20 |
21 | * - YDB Native Type
22 | - YDB SA Type
23 | - SA Type
24 | - Python Type
25 | - Notes
26 | * - ``Bool``
27 | -
28 | - ``Boolean``
29 | - ``bool``
30 | -
31 | * - ``Int8``
32 | - :class:`~ydb_sqlalchemy.sqlalchemy.types.Int8`
33 | -
34 | - ``int``
35 | - -2^7 to 2^7-1
36 | * - ``Int16``
37 | - :class:`~ydb_sqlalchemy.sqlalchemy.types.Int16`
38 | -
39 | - ``int``
40 | - -2^15 to 2^15-1
41 | * - ``Int32``
42 | - :class:`~ydb_sqlalchemy.sqlalchemy.types.Int32`
43 | -
44 | - ``int``
45 | - -2^31 to 2^31-1
46 | * - ``Int64``
47 | - :class:`~ydb_sqlalchemy.sqlalchemy.types.Int64`
48 | - ``Integer``
49 | - ``int``
50 | - -2^63 to 2^63-1, default integer type
51 | * - ``Uint8``
52 | - :class:`~ydb_sqlalchemy.sqlalchemy.types.UInt8`
53 | -
54 | - ``int``
55 | - 0 to 2^8-1
56 | * - ``Uint16``
57 | - :class:`~ydb_sqlalchemy.sqlalchemy.types.UInt16`
58 | -
59 | - ``int``
60 | - 0 to 2^16-1
61 | * - ``Uint32``
62 | - :class:`~ydb_sqlalchemy.sqlalchemy.types.UInt32`
63 | -
64 | - ``int``
65 | - 0 to 2^32-1
66 | * - ``Uint64``
67 | - :class:`~ydb_sqlalchemy.sqlalchemy.types.UInt64`
68 | -
69 | - ``int``
70 | - 0 to 2^64-1
71 | * - ``Float``
72 | -
73 | - ``Float``
74 | - ``float``
75 | -
76 | * - ``Double``
77 | -
78 | - ``Double``
79 | - ``float``
80 | - Available in SQLAlchemy 2.0+
81 | * - ``Decimal(p,s)``
82 | - :class:`~ydb_sqlalchemy.sqlalchemy.types.Decimal`
83 | - ``DECIMAL``
84 | - ``decimal.Decimal``
85 | -
86 | * - ``String``
87 | -
88 | - ``BINARY``
89 | - ``bytes``
90 | -
91 | * - ``Utf8``
92 | -
93 | - ``String`` / ``Text``
94 | - ``str``
95 | -
96 | * - ``Date``
97 | - :class:`~ydb_sqlalchemy.sqlalchemy.datetime_types.YqlDate`
98 | - ``Date``
99 | - ``datetime.date``
100 | -
101 | * - ``Date32``
102 | - :class:`~ydb_sqlalchemy.sqlalchemy.datetime_types.YqlDate32`
103 | -
104 | - ``datetime.date``
105 | - Extended date range support
106 | * - ``Datetime``
107 | - :class:`~ydb_sqlalchemy.sqlalchemy.datetime_types.YqlDateTime`
108 | - ``DATETIME``
109 | - ``datetime.datetime``
110 | -
111 | * - ``Datetime64``
112 | - :class:`~ydb_sqlalchemy.sqlalchemy.datetime_types.YqlDateTime64`
113 | -
114 | - ``datetime.datetime``
115 | - Extended datetime range
116 | * - ``Timestamp``
117 | - :class:`~ydb_sqlalchemy.sqlalchemy.datetime_types.YqlTimestamp`
118 | - ``TIMESTAMP``
119 | - ``datetime.datetime``
120 | -
121 | * - ``Timestamp64``
122 | - :class:`~ydb_sqlalchemy.sqlalchemy.datetime_types.YqlTimestamp64`
123 | -
124 | - ``datetime.datetime``
125 | - Extended timestamp range
126 | * - ``Json``
127 | - :class:`~ydb_sqlalchemy.sqlalchemy.json.YqlJSON`
128 | - ``JSON``
129 | - ``dict`` / ``list``
130 | -
131 | * - ``List``
132 | - :class:`~ydb_sqlalchemy.sqlalchemy.types.ListType`
133 | - ``ARRAY``
134 | - ``list``
135 | -
136 | * - ``Struct<...>``
137 | - :class:`~ydb_sqlalchemy.sqlalchemy.types.StructType`
138 | -
139 | - ``dict``
140 | -
141 | * - ``Optional``
142 | -
143 | - ``nullable=True``
144 | - ``None`` + base type
145 | -
146 |
147 | Standard SQLAlchemy Types
148 | -------------------------
149 |
150 | Most standard SQLAlchemy types work with YDB:
151 |
152 | .. code-block:: python
153 |
154 | from sqlalchemy import Column, Integer, String, Boolean, Float, Text
155 |
156 | class MyTable(Base):
157 | __tablename__ = 'my_table'
158 |
159 | id = Column(Integer, primary_key=True)
160 | name = Column(String(100))
161 | description = Column(Text)
162 | is_active = Column(Boolean)
163 | price = Column(Float)
164 |
165 | YDB-Specific Integer Types
166 | --------------------------
167 |
168 | YDB provides specific integer types with defined bit widths:
169 |
170 | .. code-block:: python
171 |
172 | from ydb_sqlalchemy.sqlalchemy.types import (
173 | Int8, Int16, Int32, Int64,
174 | UInt8, UInt16, UInt32, UInt64
175 | )
176 |
177 | class IntegerTypesExample(Base):
178 | __tablename__ = 'integer_types'
179 |
180 | id = Column(UInt64, primary_key=True) # Unsigned 64-bit integer
181 | small_int = Column(Int16) # Signed 16-bit integer
182 | byte_value = Column(UInt8) # Unsigned 8-bit integer (0-255)
183 | counter = Column(UInt32) # Unsigned 32-bit integer
184 |
185 | For detailed API reference, see:
186 | :class:`~ydb_sqlalchemy.sqlalchemy.types.Int8`, :class:`~ydb_sqlalchemy.sqlalchemy.types.Int16`, :class:`~ydb_sqlalchemy.sqlalchemy.types.Int32`, :class:`~ydb_sqlalchemy.sqlalchemy.types.Int64`,
187 | :class:`~ydb_sqlalchemy.sqlalchemy.types.UInt8`, :class:`~ydb_sqlalchemy.sqlalchemy.types.UInt16`, :class:`~ydb_sqlalchemy.sqlalchemy.types.UInt32`, :class:`~ydb_sqlalchemy.sqlalchemy.types.UInt64`.
188 |
189 | Decimal Type
190 | ------------
191 |
192 | YDB supports high-precision decimal numbers:
193 |
194 | .. code-block:: python
195 |
196 | from ydb_sqlalchemy.sqlalchemy.types import Decimal
197 | import decimal
198 |
199 | class FinancialData(Base):
200 | __tablename__ = 'financial_data'
201 |
202 | id = Column(UInt64, primary_key=True)
203 | # Default: Decimal(22, 9) - 22 digits total, 9 after decimal point
204 | amount = Column(Decimal())
205 |
206 | # Custom precision and scale
207 | precise_amount = Column(Decimal(precision=15, scale=4))
208 |
209 | # Return as float instead of Decimal object
210 | percentage = Column(Decimal(precision=5, scale=2, asdecimal=False))
211 |
212 | # Usage
213 | session.add(FinancialData(
214 | id=1,
215 | amount=decimal.Decimal('1234567890123.123456789'),
216 | precise_amount=decimal.Decimal('12345678901.1234'),
217 | percentage=99.99
218 | ))
219 |
220 | For detailed API reference, see: :class:`~ydb_sqlalchemy.sqlalchemy.types.Decimal`.
221 |
222 | Date and Time Types
223 | -------------------
224 |
225 | YDB provides several date and time types:
226 |
227 | .. code-block:: python
228 |
229 | from ydb_sqlalchemy.sqlalchemy.types import (
230 | YqlDate, YqlDateTime, YqlTimestamp,
231 | YqlDate32, YqlDateTime64, YqlTimestamp64
232 | )
233 | from sqlalchemy import DateTime
234 | import datetime
235 |
236 | class EventLog(Base):
237 | __tablename__ = 'event_log'
238 |
239 | id = Column(UInt64, primary_key=True)
240 |
241 | # Date only (YYYY-MM-DD) - standard range
242 | event_date = Column(YqlDate)
243 |
244 | # Date32 - extended date range support
245 | extended_date = Column(YqlDate32)
246 |
247 | # DateTime with timezone support - standard range
248 | created_at = Column(YqlDateTime(timezone=True))
249 |
250 | # DateTime64 - extended range
251 | precise_datetime = Column(YqlDateTime64(timezone=True))
252 |
253 | # Timestamp (high precision) - standard range
254 | precise_time = Column(YqlTimestamp)
255 |
256 | # Timestamp64 - extended range with microsecond precision
257 | extended_timestamp = Column(YqlTimestamp64)
258 |
259 | # Standard SQLAlchemy DateTime also works
260 | updated_at = Column(DateTime)
261 |
262 | # Usage
263 | now = datetime.datetime.now(datetime.timezone.utc)
264 | today = datetime.date.today()
265 |
266 | session.add(EventLog(
267 | id=1,
268 | event_date=today,
269 | extended_date=today,
270 | created_at=now,
271 | precise_datetime=now,
272 | precise_time=now,
273 | extended_timestamp=now,
274 | updated_at=now
275 | ))
276 |
277 | For detailed API reference, see:
278 | :class:`~ydb_sqlalchemy.sqlalchemy.datetime_types.YqlDate`, :class:`~ydb_sqlalchemy.sqlalchemy.datetime_types.YqlDateTime`, :class:`~ydb_sqlalchemy.sqlalchemy.datetime_types.YqlTimestamp`,
279 | :class:`~ydb_sqlalchemy.sqlalchemy.datetime_types.YqlDate32`, :class:`~ydb_sqlalchemy.sqlalchemy.datetime_types.YqlDateTime64`, :class:`~ydb_sqlalchemy.sqlalchemy.datetime_types.YqlTimestamp64`.
280 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/docs/migrations.rst:
--------------------------------------------------------------------------------
1 | Database Migrations with Alembic
2 | ================================
3 |
4 | This guide covers how to use Alembic for database schema migrations with YDB SQLAlchemy.
5 |
6 | Overview
7 | --------
8 |
9 | Alembic is SQLAlchemy's database migration tool that allows you to:
10 |
11 | - Track database schema changes over time
12 | - Apply incremental schema updates
13 | - Rollback to previous schema versions
14 | - Generate migration scripts automatically
15 |
16 | YDB SQLAlchemy provides full Alembic integration with some YDB-specific considerations.
17 |
18 | Installation
19 | ------------
20 |
21 | Install Alembic alongside YDB SQLAlchemy:
22 |
23 | .. code-block:: bash
24 |
25 | pip install alembic ydb-sqlalchemy
26 |
27 | Initial Setup
28 | -------------
29 |
30 | 1. Initialize Alembic in your project:
31 |
32 | .. code-block:: bash
33 |
34 | alembic init migrations
35 |
36 | This creates an ``alembic.ini`` configuration file and a ``migrations/`` directory.
37 |
38 | 2. Configure ``alembic.ini``:
39 |
40 | .. code-block:: ini
41 |
42 | # alembic.ini
43 | [alembic]
44 | script_location = migrations
45 | prepend_sys_path = .
46 | version_path_separator = os
47 |
48 | # YDB connection string
49 | sqlalchemy.url = yql+ydb://localhost:2136/local
50 |
51 | [post_write_hooks]
52 |
53 | [loggers]
54 | keys = root,sqlalchemy,alembic
55 |
56 | [handlers]
57 | keys = console
58 |
59 | [formatters]
60 | keys = generic
61 |
62 | [logger_root]
63 | level = WARN
64 | handlers = console
65 | qualname =
66 |
67 | [logger_sqlalchemy]
68 | level = WARN
69 | handlers =
70 | qualname = sqlalchemy.engine
71 |
72 | [logger_alembic]
73 | level = INFO
74 | handlers =
75 | qualname = alembic
76 |
77 | [handler_console]
78 | class = StreamHandler
79 | args = (sys.stderr,)
80 | level = NOTSET
81 | formatter = generic
82 |
83 | [formatter_generic]
84 | format = %(levelname)-5.5s [%(name)s] %(message)s
85 | datefmt = %H:%M:%S
86 |
87 | YDB-Specific Configuration
88 | --------------------------
89 |
90 | YDB requires special configuration in ``env.py`` due to its unique characteristics:
91 |
92 | .. code-block:: python
93 |
94 | # migrations/env.py
95 | from logging.config import fileConfig
96 | import sqlalchemy as sa
97 | from sqlalchemy import engine_from_config, pool
98 | from alembic import context
99 | from alembic.ddl.impl import DefaultImpl
100 |
101 | # Import your models
102 | from myapp.models import Base
103 |
104 | config = context.config
105 |
106 | if config.config_file_name is not None:
107 | fileConfig(config.config_file_name)
108 |
109 | target_metadata = Base.metadata
110 |
111 | # YDB-specific implementation
112 | class YDBImpl(DefaultImpl):
113 | __dialect__ = "yql"
114 |
115 | def run_migrations_offline() -> None:
116 | """Run migrations in 'offline' mode."""
117 | url = config.get_main_option("sqlalchemy.url")
118 | context.configure(
119 | url=url,
120 | target_metadata=target_metadata,
121 | literal_binds=True,
122 | dialect_opts={"paramstyle": "named"},
123 | )
124 |
125 | with context.begin_transaction():
126 | context.run_migrations()
127 |
128 | def run_migrations_online() -> None:
129 | """Run migrations in 'online' mode."""
130 | connectable = engine_from_config(
131 | config.get_section(config.config_ini_section, {}),
132 | prefix="sqlalchemy.",
133 | poolclass=pool.NullPool,
134 | )
135 |
136 | with connectable.connect() as connection:
137 | context.configure(
138 | connection=connection,
139 | target_metadata=target_metadata
140 | )
141 |
142 | # YDB-specific: Custom version table structure
143 | ctx = context.get_context()
144 | ctx._version = sa.Table(
145 | ctx.version_table,
146 | sa.MetaData(),
147 | sa.Column("version_num", sa.String(32), nullable=False),
148 | sa.Column("id", sa.Integer(), nullable=True, primary_key=True),
149 | )
150 |
151 | with context.begin_transaction():
152 | context.run_migrations()
153 |
154 | if context.is_offline_mode():
155 | run_migrations_offline()
156 | else:
157 | run_migrations_online()
158 |
159 | Creating Your First Migration
160 | -----------------------------
161 |
162 | 1. Define your models:
163 |
164 | .. code-block:: python
165 |
166 | # models.py
167 | from sqlalchemy import Column, String, Integer
168 | from sqlalchemy.ext.declarative import declarative_base
169 | from ydb_sqlalchemy.sqlalchemy.types import UInt64
170 |
171 | Base = declarative_base()
172 |
173 | class User(Base):
174 | __tablename__ = 'users'
175 |
176 | id = Column(UInt64, primary_key=True)
177 | username = Column(String(50), nullable=False)
178 | email = Column(String(100), nullable=False)
179 | full_name = Column(String(200))
180 |
181 | 2. Generate the initial migration:
182 |
183 | .. code-block:: bash
184 |
185 | alembic revision --autogenerate -m "Create users table"
186 |
187 | This creates a migration file like ``001_create_users_table.py``:
188 |
189 | .. code-block:: python
190 |
191 | """Create users table
192 |
193 | Revision ID: 001
194 | Revises:
195 | Create Date: 2024-01-01 12:00:00.000000
196 | """
197 | from alembic import op
198 | import sqlalchemy as sa
199 | from ydb_sqlalchemy.sqlalchemy.types import UInt64
200 |
201 | revision = '001'
202 | down_revision = None
203 | branch_labels = None
204 | depends_on = None
205 |
206 | def upgrade() -> None:
207 | op.create_table('users',
208 | sa.Column('id', UInt64(), nullable=False),
209 | sa.Column('username', sa.String(length=50), nullable=False),
210 | sa.Column('email', sa.String(length=100), nullable=False),
211 | sa.Column('full_name', sa.String(length=200), nullable=True),
212 | sa.PrimaryKeyConstraint('id')
213 | )
214 |
215 | def downgrade() -> None:
216 | op.drop_table('users')
217 |
218 | 3. Apply the migration:
219 |
220 | .. code-block:: bash
221 |
222 | alembic upgrade head
223 |
224 | Common Migration Operations
225 | ---------------------------
226 |
227 | Adding a Column
228 | ~~~~~~~~~~~~~~~
229 |
230 | .. code-block:: python
231 |
232 | # Add a new column
233 | def upgrade() -> None:
234 | op.add_column('users', sa.Column('created_at', sa.DateTime(), nullable=True))
235 |
236 | def downgrade() -> None:
237 | op.drop_column('users', 'created_at')
238 |
239 | Modifying a Column
240 | ~~~~~~~~~~~~~~~~~~
241 |
242 | .. code-block:: python
243 |
244 | # Change column type (be careful with YDB limitations)
245 | def upgrade() -> None:
246 | op.alter_column('users', 'username',
247 | existing_type=sa.String(50),
248 | type_=sa.String(100),
249 | nullable=False)
250 |
251 | def downgrade() -> None:
252 | op.alter_column('users', 'username',
253 | existing_type=sa.String(100),
254 | type_=sa.String(50),
255 | nullable=False)
256 |
257 | Creating Indexes
258 | ~~~~~~~~~~~~~~~~
259 |
260 | .. code-block:: python
261 |
262 | def upgrade() -> None:
263 | op.create_index('ix_users_email', 'users', ['email'])
264 |
265 | def downgrade() -> None:
266 | op.drop_index('ix_users_email', table_name='users')
267 |
268 | Adding a New Table
269 | ~~~~~~~~~~~~~~~~~~
270 |
271 | .. code-block:: python
272 |
273 | def upgrade() -> None:
274 | op.create_table('posts',
275 | sa.Column('id', UInt64(), nullable=False),
276 | sa.Column('user_id', UInt64(), nullable=False),
277 | sa.Column('title', sa.String(200), nullable=False),
278 | sa.Column('content', sa.Text(), nullable=True),
279 | sa.Column('created_at', sa.DateTime(), nullable=False),
280 | sa.PrimaryKeyConstraint('id'),
281 |             sa.ForeignKeyConstraint(['user_id'], ['users.id'])  # informational only: YDB does not enforce foreign key constraints
282 | )
283 |
284 | def downgrade() -> None:
285 | op.drop_table('posts')
286 |
287 | YDB-Specific Considerations
288 | ---------------------------
289 |
290 | Primary Key Limitations
291 | ~~~~~~~~~~~~~~~~~~~~~~~~
292 |
293 | YDB doesn't support modifying primary key columns. Plan your primary keys carefully:
294 |
295 | .. code-block:: python
296 |
297 | # Good: Use appropriate primary key from the start
298 | class User(Base):
299 | __tablename__ = 'users'
300 | id = Column(UInt64, primary_key=True) # Can't be changed later
301 |
302 | # If you need to change primary key structure, you'll need to:
303 | # 1. Create new table with correct primary key
304 | # 2. Migrate data
305 | # 3. Drop old table
306 | # 4. Rename new table
307 |
308 | Data Type Constraints
309 | ~~~~~~~~~~~~~~~~~~~~~
310 |
311 | Some type changes are not supported:
312 |
313 | .. code-block:: python
314 |
315 | # Supported: Increasing string length
316 | op.alter_column('users', 'username',
317 | existing_type=sa.String(50),
318 | type_=sa.String(100))
319 |
320 | # Not supported: Changing fundamental type
321 | # op.alter_column('users', 'id',
322 | # existing_type=UInt32(),
323 | # type_=UInt64()) # This won't work
324 |
325 | Working with YDB Types
326 | ~~~~~~~~~~~~~~~~~~~~~~
327 |
328 | Use YDB-specific types in migrations:
329 |
330 | .. code-block:: python
331 |
332 | from ydb_sqlalchemy.sqlalchemy.types import (
333 | UInt64, UInt32, Decimal, YqlJSON, YqlDateTime
334 | )
335 |
336 | def upgrade() -> None:
337 | op.create_table('financial_records',
338 | sa.Column('id', UInt64(), nullable=False),
339 | sa.Column('amount', Decimal(precision=15, scale=2), nullable=False),
340 | sa.Column('metadata', YqlJSON(), nullable=True),
341 | sa.Column('created_at', YqlDateTime(timezone=True), nullable=False),
342 | sa.PrimaryKeyConstraint('id')
343 | )
344 |
345 | Advanced Migration Patterns
346 | ---------------------------
347 |
348 | Data Migrations
349 | ~~~~~~~~~~~~~~~
350 |
351 | Sometimes you need to migrate data along with schema:
352 |
353 | .. code-block:: python
354 |
355 | from alembic import op
356 | import sqlalchemy as sa
357 | from sqlalchemy.sql import table, column
358 |
359 | def upgrade() -> None:
360 | # Add new column
361 | op.add_column('users', sa.Column('status', sa.String(20), nullable=True))
362 |
363 | # Create a temporary table representation for data migration
364 | users_table = table('users',
365 | column('id', UInt64),
366 | column('status', sa.String)
367 | )
368 |
369 | # Update existing records
370 | op.execute(
371 | users_table.update().values(status='active')
372 | )
373 |
374 | # Make column non-nullable
375 | op.alter_column('users', 'status', nullable=False)
376 |
377 | def downgrade() -> None:
378 | op.drop_column('users', 'status')
379 |
380 | Conditional Migrations
381 | ~~~~~~~~~~~~~~~~~~~~~~
382 |
383 | .. code-block:: python
384 |
385 | def upgrade() -> None:
386 | # Check if column already exists
387 | conn = op.get_bind()
388 | inspector = sa.inspect(conn)
389 | columns = [col['name'] for col in inspector.get_columns('users')]
390 |
391 | if 'new_column' not in columns:
392 | op.add_column('users', sa.Column('new_column', sa.String(50)))
393 |
394 | Migration Best Practices
395 | ------------------------
396 |
397 | 1. **Test Migrations**: Always test migrations on a copy of production data
398 | 2. **Backup Data**: Backup your data before running migrations in production
399 | 3. **Review Generated Migrations**: Always review auto-generated migrations before applying
400 | 4. **Use Transactions**: Migrations run in transactions by default
401 | 5. **Plan Primary Keys**: Design primary keys carefully as they can't be easily changed
402 |
403 | .. code-block:: python
404 |
405 | # Good migration practices
406 | def upgrade() -> None:
407 | # Add columns as nullable first
408 | op.add_column('users', sa.Column('new_field', sa.String(100), nullable=True))
409 |
410 | # Populate data
411 | # ... data migration code ...
412 |
413 | # Then make non-nullable if needed
414 | op.alter_column('users', 'new_field', nullable=False)
415 |
416 | Common Commands
417 | ---------------
418 |
419 | .. code-block:: bash
420 |
421 | # Generate new migration
422 | alembic revision --autogenerate -m "Description of changes"
423 |
424 | # Apply all pending migrations
425 | alembic upgrade head
426 |
427 | # Apply specific migration
428 | alembic upgrade revision_id
429 |
430 | # Rollback one migration
431 | alembic downgrade -1
432 |
433 | # Rollback to specific revision
434 | alembic downgrade revision_id
435 |
436 | # Show current revision
437 | alembic current
438 |
439 | # Show migration history
440 | alembic history
441 |
442 | # Show details of the head (latest) revision
443 | alembic show head
444 |
445 | Troubleshooting
446 | ---------------
447 |
448 | **Migration Fails with "Table already exists"**
449 | - Check if migration was partially applied
450 | - Use ``alembic stamp head`` to mark current state without running migrations
451 |
452 | **Primary Key Constraint Errors**
453 | - YDB requires primary keys on all tables
454 | - Ensure all tables have appropriate primary keys
455 |
456 | **Type Conversion Errors**
457 | - Some type changes aren't supported in YDB
458 | - Create new column, migrate data, drop old column instead
459 |
460 | **Connection Issues**
461 | - Verify YDB is running and accessible
462 | - Check connection string in ``alembic.ini``
463 |
464 | Example Project Structure
465 | -------------------------
466 |
467 | .. code-block:: text
468 |
469 | myproject/
470 | ├── alembic.ini
471 | ├── migrations/
472 | │ ├── env.py
473 | │ ├── script.py.mako
474 | │ └── versions/
475 | │ ├── 001_create_users_table.py
476 | │ ├── 002_add_posts_table.py
477 | │ └── 003_add_user_status.py
478 | ├── models/
479 | │ ├── __init__.py
480 | │ ├── user.py
481 | │ └── post.py
482 | └── main.py
483 |
484 | This setup provides a robust foundation for managing YDB schema changes over time using Alembic migrations.
485 |
--------------------------------------------------------------------------------
/ydb_sqlalchemy/sqlalchemy/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Experimental
3 | Work in progress, breaking changes are possible.
4 | """
5 |
6 | import collections
7 | import collections.abc
8 | from typing import Any, Mapping, Optional, Sequence, Tuple, Union
9 |
10 | import sqlalchemy as sa
11 | import ydb
12 | from sqlalchemy import util
13 | from sqlalchemy.engine import characteristics, reflection
14 | from sqlalchemy.engine.default import DefaultExecutionContext, StrCompileDialect
15 | from sqlalchemy.exc import NoSuchTableError
16 | from sqlalchemy.sql import functions
17 |
18 | from sqlalchemy.sql.elements import ClauseList
19 |
20 | import ydb_dbapi
21 | from ydb_sqlalchemy.sqlalchemy.dbapi_adapter import AdaptedAsyncConnection
22 | from ydb_sqlalchemy.sqlalchemy.dml import Upsert
23 |
24 | from ydb_sqlalchemy.sqlalchemy.compiler import YqlCompiler, YqlDDLCompiler, YqlIdentifierPreparer, YqlTypeCompiler
25 |
26 | from . import types
27 |
28 |
# True when running under SQLAlchemy 1.x (anything below 2.0); used to choose
# between legacy dict-based and 2.0-style reflection return values.
OLD_SA = sa.__version__ < "2."
30 |
31 |
class ParametrizedFunction(functions.Function):
    """A SQL function call carrying an extra YQL-specific parameter list.

    Compiled through the dialect's ``parametrized_function`` visit hook.
    """

    __visit_name__ = "parametrized_function"

    def __init__(self, name, params, *args, **kwargs):
        super().__init__(name, *args, **kwargs)
        self._func_name = name
        self._func_params = params
        # Collect the parameters into one comma-separated, parenthesized list.
        self.params_expr = ClauseList(
            *params,
            operator=functions.operators.comma_op,
            group_contents=True,
        ).self_group()
40 |
41 |
def upsert(table):
    """Return an :class:`Upsert` construct for *table* (YQL ``UPSERT`` statement)."""
    return Upsert(table)
44 |
45 |
# Mapping from YDB column types to the SQLAlchemy types reported by reflection
# (see _get_column_info).  The bare ydb.DecimalType entry is a fallback;
# parametrized decimals are special-cased in _get_column_info so that
# precision/scale are preserved.
COLUMN_TYPES = {
    ydb.PrimitiveType.Int8: sa.INTEGER,
    ydb.PrimitiveType.Int16: sa.INTEGER,
    ydb.PrimitiveType.Int32: sa.INTEGER,
    ydb.PrimitiveType.Int64: sa.INTEGER,
    ydb.PrimitiveType.Uint8: sa.INTEGER,
    ydb.PrimitiveType.Uint16: sa.INTEGER,
    ydb.PrimitiveType.Uint32: types.UInt32,
    ydb.PrimitiveType.Uint64: types.UInt64,
    ydb.PrimitiveType.Float: sa.FLOAT,
    ydb.PrimitiveType.Double: sa.FLOAT,
    ydb.PrimitiveType.String: sa.BINARY,
    ydb.PrimitiveType.Utf8: sa.TEXT,
    ydb.PrimitiveType.Json: sa.JSON,
    ydb.PrimitiveType.JsonDocument: sa.JSON,
    ydb.DecimalType: sa.DECIMAL,
    ydb.PrimitiveType.Yson: sa.TEXT,
    ydb.PrimitiveType.Date: sa.DATE,
    ydb.PrimitiveType.Date32: sa.DATE,
    ydb.PrimitiveType.Timestamp64: sa.TIMESTAMP,
    ydb.PrimitiveType.Datetime64: sa.DATETIME,
    ydb.PrimitiveType.Datetime: sa.DATETIME,
    ydb.PrimitiveType.Timestamp: sa.TIMESTAMP,
    ydb.PrimitiveType.Interval: sa.INTEGER,
    ydb.PrimitiveType.Bool: sa.BOOLEAN,
    ydb.PrimitiveType.DyNumber: sa.TEXT,
}
73 |
74 |
def _get_column_info(t):
    """Translate a YDB column type into ``(sqlalchemy_type, nullable)``.

    Optional wrappers are unwrapped and mark the column nullable; decimal
    types keep their precision and scale.
    """
    is_nullable = isinstance(t, ydb.OptionalType)
    if is_nullable:
        t = t.item

    if isinstance(t, ydb.DecimalType):
        sa_type = sa.DECIMAL(precision=t.precision, scale=t.scale)
    else:
        sa_type = COLUMN_TYPES[t]
    return sa_type, is_nullable
85 |
86 |
class YdbRequestSettingsCharacteristic(characteristics.ConnectionCharacteristic):
    """Exposes YDB request settings as a per-connection execution-option
    characteristic, delegating storage to the dialect."""

    def set_characteristic(
        self, dialect: "YqlDialect", dbapi_connection: ydb_dbapi.Connection, value: ydb.BaseRequestSettings
    ) -> None:
        dialect.set_ydb_request_settings(dbapi_connection, value)

    def get_characteristic(
        self, dialect: "YqlDialect", dbapi_connection: ydb_dbapi.Connection
    ) -> ydb.BaseRequestSettings:
        return dialect.get_ydb_request_settings(dbapi_connection)

    def reset_characteristic(self, dialect: "YqlDialect", dbapi_connection: ydb_dbapi.Connection) -> None:
        dialect.reset_ydb_request_settings(dbapi_connection)
100 |
101 |
class YdbRetrySettingsCharacteristic(characteristics.ConnectionCharacteristic):
    """Exposes YDB retry settings as a per-connection execution-option
    characteristic, delegating storage to the dialect."""

    def set_characteristic(
        self, dialect: "YqlDialect", dbapi_connection: ydb_dbapi.Connection, value: ydb.RetrySettings
    ) -> None:
        dialect.set_ydb_retry_settings(dbapi_connection, value)

    def get_characteristic(self, dialect: "YqlDialect", dbapi_connection: ydb_dbapi.Connection) -> ydb.RetrySettings:
        return dialect.get_ydb_retry_settings(dbapi_connection)

    def reset_characteristic(self, dialect: "YqlDialect", dbapi_connection: ydb_dbapi.Connection) -> None:
        dialect.reset_ydb_retry_settings(dbapi_connection)
113 |
114 |
class YqlDialect(StrCompileDialect):
    """SQLAlchemy dialect for YDB (dialect name "yql", driver "ydb").

    Declares YDB's capabilities to SQLAlchemy and wires in the YQL
    compilers, identifier preparer and type mappings.
    """

    name = "yql"
    driver = "ydb"

    # Capability flags: YDB has no ALTER support via this dialect, no native
    # enums/sequences/RETURNING, and no schemas.
    supports_alter = False
    max_identifier_length = 63
    supports_sane_rowcount = False
    supports_statement_cache = True

    supports_native_enum = False
    supports_native_boolean = True
    supports_native_decimal = True
    supports_smallserial = False
    supports_schemas = False
    supports_constraint_comments = False
    supports_json_type = True

    insert_returning = False
    update_returning = False
    delete_returning = False

    supports_sequences = False
    sequences_optional = False
    preexecute_autoincrement_sequences = True
    postfetch_lastrowid = False

    supports_default_values = False
    supports_empty_insert = False
    supports_multivalues_insert = True
    default_paramstyle = "qmark"

    isolation_level = None

    # Compiler / type plumbing.
    preparer = YqlIdentifierPreparer
    statement_compiler = YqlCompiler
    ddl_compiler = YqlDDLCompiler
    type_compiler = YqlTypeCompiler
    colspecs = {
        sa.types.JSON: types.YqlJSON,
        sa.types.JSON.JSONPathType: types.YqlJSON.YqlJSONPathType,
        sa.types.Date: types.YqlDate,
        sa.types.DateTime: types.YqlTimestamp,  # Because YDB's DateTime doesn't store microseconds
        sa.types.DATETIME: types.YqlDateTime,
        sa.types.TIMESTAMP: types.YqlTimestamp,
        sa.types.DECIMAL: types.Decimal,
    }

    connection_characteristics = util.immutabledict(
        {
            "isolation_level": characteristics.IsolationLevelCharacteristic(),
            "ydb_request_settings": YdbRequestSettingsCharacteristic(),
            "ydb_retry_settings": YdbRetrySettingsCharacteristic(),
        }
    )

    # Dialect-specific keyword arguments accepted on Table and Index objects;
    # callers spell them with the dialect prefix (e.g. ydb_async, ydb_cover).
    construct_arguments = [
        (
            sa.schema.Table,
            {
                "auto_partitioning_by_size": None,
                "auto_partitioning_by_load": None,
                "auto_partitioning_partition_size_mb": None,
                "auto_partitioning_min_partitions_count": None,
                "auto_partitioning_max_partitions_count": None,
                "uniform_partitions": None,
                "partition_at_keys": None,
            },
        ),
        (
            sa.schema.Index,
            {
                "async": False,
                "cover": [],
            },
        ),
    ]
191 |
    @classmethod
    def import_dbapi(cls: Any):
        # SQLAlchemy 2.x hook: return the DBAPI module backing this dialect.
        return ydb_dbapi
195 |
    @classmethod
    def dbapi(cls):
        # Legacy (SQLAlchemy 1.4) spelling of import_dbapi().
        return cls.import_dbapi()
199 |
    def __init__(
        self,
        json_serializer=None,
        json_deserializer=None,
        _add_declare_for_yql_stmt_vars=False,
        **kwargs,
    ):
        """Create the dialect.

        :param json_serializer: optional callable used to serialize JSON column values.
        :param json_deserializer: optional callable used to deserialize JSON column values.
        :param _add_declare_for_yql_stmt_vars: when True, prepend DECLARE
            statements for bind variables to generated YQL (legacy behavior).
        :param kwargs: forwarded to :class:`StrCompileDialect`.
        """
        super().__init__(**kwargs)

        self._json_deserializer = json_deserializer
        self._json_serializer = json_serializer
        # NOTE: _add_declare_for_yql_stmt_vars is temporary and is soon to be removed.
        # no need in declare in yql statement here since ydb 24-1
        self._add_declare_for_yql_stmt_vars = _add_declare_for_yql_stmt_vars
214 |
215 | def _describe_table(self, connection, table_name, schema=None) -> ydb.TableDescription:
216 | if schema is not None:
217 | raise ydb_dbapi.NotSupportedError("unsupported on non empty schema")
218 |
219 | qt = table_name if isinstance(table_name, str) else table_name.name
220 | raw_conn = connection.connection
221 | try:
222 | return raw_conn.describe(qt)
223 | except ydb_dbapi.DatabaseError as e:
224 | raise NoSuchTableError(qt) from e
225 |
    def get_view_names(self, connection, schema=None, **kw: Any):
        # View reflection is not implemented; report no views.
        return []
228 |
229 | @reflection.cache
230 | def get_columns(self, connection, table_name, schema=None, **kw):
231 | table = self._describe_table(connection, table_name, schema)
232 | as_compatible = []
233 | for column in table.columns:
234 | col_type, nullable = _get_column_info(column.type)
235 | as_compatible.append(
236 | {
237 | "name": column.name,
238 | "type": col_type,
239 | "nullable": nullable,
240 | "default": None,
241 | }
242 | )
243 |
244 | return as_compatible
245 |
246 | @reflection.cache
247 | def get_table_names(self, connection, schema=None, **kw):
248 | if schema:
249 | raise ydb_dbapi.NotSupportedError("unsupported on non empty schema")
250 |
251 | raw_conn = connection.connection
252 | return raw_conn.get_table_names()
253 |
254 | @reflection.cache
255 | def has_table(self, connection, table_name, schema=None, **kwargs):
256 | try:
257 | self._describe_table(connection, table_name, schema)
258 | return True
259 | except NoSuchTableError:
260 | return False
261 |
    @reflection.cache
    def get_pk_constraint(self, connection, table_name, schema=None, **kwargs):
        # YDB primary keys are unnamed; report only the constrained columns.
        table = self._describe_table(connection, table_name, schema)
        return {"constrained_columns": table.primary_key, "name": None}
266 |
    @reflection.cache
    def get_foreign_keys(self, connection, table_name, schema=None, **kwargs):
        # YDB has no foreign key constraints, so there is never anything to reflect.
        return []
271 |
272 | @reflection.cache
273 | def get_indexes(self, connection, table_name, schema=None, **kwargs):
274 | table = self._describe_table(connection, table_name, schema)
275 | indexes: list[ydb.TableIndex] = table.indexes
276 | if OLD_SA:
277 | sa_indexes: list[dict] = []
278 | for index in indexes:
279 | sa_indexes.append(
280 | {
281 | "name": index.name,
282 | "column_names": index.index_columns,
283 | "unique": False,
284 | "dialect_options": {
285 | "ydb_async": False, # TODO After https://github.com/ydb-platform/ydb-python-sdk/issues/351
286 | "ydb_cover": [], # TODO After https://github.com/ydb-platform/ydb-python-sdk/issues/409
287 | },
288 | }
289 | )
290 | return sa_indexes
291 |
292 | sa_indexes: list[sa.engine.interfaces.ReflectedIndex] = []
293 | for index in indexes:
294 | sa_indexes.append(
295 | sa.engine.interfaces.ReflectedIndex(
296 | name=index.name,
297 | column_names=index.index_columns,
298 | unique=False,
299 | dialect_options={
300 | "ydb_async": False, # TODO After https://github.com/ydb-platform/ydb-python-sdk/issues/351
301 | "ydb_cover": [], # TODO After https://github.com/ydb-platform/ydb-python-sdk/issues/409
302 | },
303 | )
304 | )
305 | return sa_indexes
306 |
    def set_isolation_level(self, dbapi_connection: ydb_dbapi.Connection, level: str) -> None:
        # Delegate isolation-level handling to the DBAPI connection.
        dbapi_connection.set_isolation_level(level)
309 |
    def get_default_isolation_level(self, dbapi_conn: ydb_dbapi.Connection) -> str:
        # New YDB connections default to autocommit.
        return ydb_dbapi.IsolationLevel.AUTOCOMMIT
312 |
    def get_isolation_level(self, dbapi_connection: ydb_dbapi.Connection) -> str:
        # Read the currently active level back from the DBAPI connection.
        return dbapi_connection.get_isolation_level()
315 |
    def set_ydb_request_settings(
        self,
        dbapi_connection: ydb_dbapi.Connection,
        value: ydb.BaseRequestSettings,
    ) -> None:
        # Store the YDB request settings on the DBAPI connection.
        dbapi_connection.set_ydb_request_settings(value)
322 |
    def reset_ydb_request_settings(self, dbapi_connection: ydb_dbapi.Connection):
        # Restore defaults by installing a fresh BaseRequestSettings.
        self.set_ydb_request_settings(dbapi_connection, ydb.BaseRequestSettings())
325 |
    def get_ydb_request_settings(self, dbapi_connection: ydb_dbapi.Connection) -> ydb.BaseRequestSettings:
        # Read the currently active request settings from the DBAPI connection.
        return dbapi_connection.get_ydb_request_settings()
328 |
    def set_ydb_retry_settings(
        self,
        dbapi_connection: ydb_dbapi.Connection,
        value: ydb.RetrySettings,
    ) -> None:
        # Store the YDB retry settings on the DBAPI connection.
        dbapi_connection.set_ydb_retry_settings(value)
335 |
    def reset_ydb_retry_settings(self, dbapi_connection: ydb_dbapi.Connection):
        # Restore defaults by installing a fresh RetrySettings.
        self.set_ydb_retry_settings(dbapi_connection, ydb.RetrySettings())
338 |
    def get_ydb_retry_settings(self, dbapi_connection: ydb_dbapi.Connection) -> ydb.RetrySettings:
        # Read the currently active retry settings from the DBAPI connection.
        return dbapi_connection.get_ydb_retry_settings()
341 |
342 | def create_connect_args(self, url):
343 | args, kwargs = super().create_connect_args(url)
344 | # YDB database name should start with '/'
345 | if "database" in kwargs:
346 | if not kwargs["database"].startswith("/"):
347 | kwargs["database"] = "/" + kwargs["database"]
348 |
349 | return [args, kwargs]
350 |
    def connect(self, *cargs, **cparams):
        # Open a new DBAPI connection.
        return self.dbapi.connect(*cargs, **cparams)
353 |
    def do_begin(self, dbapi_connection: ydb_dbapi.Connection) -> None:
        # Start an explicit transaction on the DBAPI connection.
        dbapi_connection.begin()
356 |
    def do_rollback(self, dbapi_connection: ydb_dbapi.Connection) -> None:
        # Roll back the current transaction on the DBAPI connection.
        dbapi_connection.rollback()
359 |
    def do_commit(self, dbapi_connection: ydb_dbapi.Connection) -> None:
        # Commit the current transaction on the DBAPI connection.
        dbapi_connection.commit()
362 |
363 | def _handle_column_name(self, variable):
364 | return "`" + variable + "`"
365 |
366 | def _format_variables(
367 | self,
368 | statement: str,
369 | parameters: Optional[Union[Sequence[Mapping[str, Any]], Mapping[str, Any]]],
370 | execute_many: bool,
371 | ) -> Tuple[str, Optional[Union[Sequence[Mapping[str, Any]], Mapping[str, Any]]]]:
372 | formatted_statement = statement
373 | formatted_parameters = None
374 |
375 | if parameters:
376 | if execute_many:
377 | parameters_sequence: Sequence[Mapping[str, Any]] = parameters
378 | variable_names = set()
379 | formatted_parameters = []
380 | for i in range(len(parameters_sequence)):
381 | variable_names.update(set(parameters_sequence[i].keys()))
382 | formatted_parameters.append({f"${k}": v for k, v in parameters_sequence[i].items()})
383 | else:
384 | variable_names = set(parameters.keys())
385 | formatted_parameters = {f"${k}": v for k, v in parameters.items()}
386 |
387 | formatted_variable_names = {
388 | variable_name: f"${self._handle_column_name(variable_name)}" for variable_name in variable_names
389 | }
390 | formatted_statement = formatted_statement % formatted_variable_names
391 |
392 | formatted_statement = formatted_statement.replace("%%", "%")
393 | return formatted_statement, formatted_parameters
394 |
395 | def _add_declare_for_yql_stmt_vars_impl(self, statement, parameters_types):
396 | declarations = "\n".join(
397 | [
398 | f"DECLARE $`{param_name[1:] if param_name.startswith('$') else param_name}` as {str(param_type)};"
399 | for param_name, param_type in parameters_types.items()
400 | ]
401 | )
402 | return f"{declarations}\n{statement}"
403 |
404 | def __merge_parameters_values_and_types(
405 | self, values: Mapping[str, Any], types: Mapping[str, Any], execute_many: bool
406 | ) -> Sequence[Mapping[str, ydb.TypedValue]]:
407 | if isinstance(values, collections.abc.Mapping):
408 | values = [values]
409 |
410 | result_list = []
411 | for value_map in values:
412 | result = {}
413 | for key in value_map.keys():
414 | if key in types:
415 | result[key] = ydb.TypedValue(value_map[key], types[key])
416 | else:
417 | result[key] = value_map[key]
418 | result_list.append(result)
419 | return result_list if execute_many else result_list[0]
420 |
421 | def _prepare_ydb_query(
422 | self,
423 | statement: str,
424 | context: Optional[DefaultExecutionContext] = None,
425 | parameters: Optional[Union[Sequence[Mapping[str, Any]], Mapping[str, Any]]] = None,
426 | execute_many: bool = False,
427 | ) -> Tuple[Optional[Union[Sequence[Mapping[str, Any]], Mapping[str, Any]]]]:
428 | is_ddl = context.isddl if context is not None else False
429 |
430 | if not is_ddl and parameters:
431 | parameters_types = context.compiled.get_bind_types(parameters)
432 | if parameters_types != {}:
433 | parameters = self.__merge_parameters_values_and_types(parameters, parameters_types, execute_many)
434 | statement, parameters = self._format_variables(statement, parameters, execute_many)
435 | if self._add_declare_for_yql_stmt_vars:
436 | statement = self._add_declare_for_yql_stmt_vars_impl(statement, parameters_types)
437 | return statement, parameters
438 |
439 | statement, parameters = self._format_variables(statement, parameters, execute_many)
440 | return statement, parameters
441 |
442 | def do_ping(self, dbapi_connection: ydb_dbapi.Connection) -> bool:
443 | cursor = dbapi_connection.cursor()
444 | statement, _ = self._prepare_ydb_query(self._dialect_specific_select_one)
445 | try:
446 | cursor.execute(statement)
447 | finally:
448 | cursor.close()
449 | return True
450 |
451 | def do_executemany(
452 | self,
453 | cursor: ydb_dbapi.Cursor,
454 | statement: str,
455 | parameters: Optional[Sequence[Mapping[str, Any]]],
456 | context: Optional[DefaultExecutionContext] = None,
457 | ) -> None:
458 | operation, parameters = self._prepare_ydb_query(statement, context, parameters, execute_many=True)
459 | cursor.executemany(operation, parameters)
460 |
461 | def do_execute(
462 | self,
463 | cursor: ydb_dbapi.Cursor,
464 | statement: str,
465 | parameters: Optional[Mapping[str, Any]] = None,
466 | context: Optional[DefaultExecutionContext] = None,
467 | ) -> None:
468 | operation, parameters = self._prepare_ydb_query(statement, context, parameters, execute_many=False)
469 | is_ddl = context.isddl if context is not None else False
470 | if is_ddl:
471 | cursor.execute_scheme(operation, parameters)
472 | else:
473 | cursor.execute(operation, parameters)
474 |
475 |
class AsyncYqlDialect(YqlDialect):
    """Asyncio flavour of the YQL dialect: wraps async DBAPI connections."""

    driver = "ydb_async"
    is_async = True
    supports_statement_cache = True

    def connect(self, *cargs, **cparams):
        """Open an async DBAPI connection and adapt it to the sync interface."""
        raw_connection = util.await_only(self.dbapi.async_connect(*cargs, **cparams))
        return AdaptedAsyncConnection(raw_connection)
483 |
--------------------------------------------------------------------------------
/ydb_sqlalchemy/sqlalchemy/compiler/base.py:
--------------------------------------------------------------------------------
1 | import collections
2 | import sqlalchemy as sa
3 | import ydb
4 | from ydb_dbapi import NotSupportedError
5 |
6 | from sqlalchemy.exc import CompileError
7 | from sqlalchemy.sql import ddl
8 | from sqlalchemy.sql.compiler import (
9 | DDLCompiler,
10 | IdentifierPreparer,
11 | StrSQLCompiler,
12 | StrSQLTypeCompiler,
13 | selectable,
14 | )
15 | from typing import (
16 | Any,
17 | Dict,
18 | List,
19 | Mapping,
20 | Sequence,
21 | Optional,
22 | Tuple,
23 | Type,
24 | Union,
25 | )
26 |
27 |
28 | from .. import types
29 |
30 |
31 | OLD_SA = sa.__version__ < "2."
32 | if OLD_SA:
33 | from sqlalchemy import bindparam as _bindparam
34 | from sqlalchemy import cast as _cast
35 | else:
36 | from sqlalchemy import BindParameter as _bindparam
37 | from sqlalchemy import Cast as _cast
38 |
39 |
# Escape table used when rendering Python strings as YQL single-quoted
# literals.  '%' is doubled because the rendered statement may later go
# through printf-style ('%') placeholder substitution.
STR_QUOTE_MAP = {
    "'": "\\'",
    "\\": "\\\\",
    "\0": "\\0",
    "\b": "\\b",
    "\f": "\\f",
    "\r": "\\r",
    "\n": "\\n",
    "\t": "\\t",
    "%": "%%",
}


# SQLAlchemy compound-select keyword -> YQL spelling.
# NOTE(review): plain UNION is rendered as "UNION ALL" -- presumably YQL does
# not support a distinct UNION here; confirm before relying on deduplication.
COMPOUND_KEYWORDS = {
    selectable.CompoundSelect.UNION: "UNION ALL",
    selectable.CompoundSelect.UNION_ALL: "UNION ALL",
    selectable.CompoundSelect.EXCEPT: "EXCEPT",
    selectable.CompoundSelect.EXCEPT_ALL: "EXCEPT ALL",
    selectable.CompoundSelect.INTERSECT: "INTERSECT",
    selectable.CompoundSelect.INTERSECT_ALL: "INTERSECT ALL",
}
61 |
62 |
class BaseYqlTypeCompiler(StrSQLTypeCompiler):
    """Renders SQLAlchemy column types as YQL type names.

    Also provides :meth:`get_ydb_type`, which maps SQLAlchemy types to the
    ``ydb`` driver's type objects for bind-parameter declaration.
    """

    def visit_JSON(self, type_: Union[sa.JSON, types.YqlJSON], **kw):
        return "JSON"

    def visit_CHAR(self, type_: sa.CHAR, **kw):
        return "UTF8"

    def visit_VARCHAR(self, type_: sa.VARCHAR, **kw):
        return "UTF8"

    def visit_unicode(self, type_: sa.Unicode, **kw):
        return "UTF8"

    def visit_NVARCHAR(self, type_: sa.NVARCHAR, **kw):
        return "UTF8"

    def visit_TEXT(self, type_: sa.TEXT, **kw):
        return "UTF8"

    def visit_FLOAT(self, type_: sa.FLOAT, **kw):
        return "FLOAT"

    def visit_BOOLEAN(self, type_: sa.BOOLEAN, **kw):
        return "BOOL"

    def visit_uint64(self, type_: types.UInt64, **kw):
        return "UInt64"

    def visit_uint32(self, type_: types.UInt32, **kw):
        return "UInt32"

    def visit_uint16(self, type_: types.UInt16, **kw):
        return "UInt16"

    def visit_uint8(self, type_: types.UInt8, **kw):
        return "UInt8"

    def visit_int64(self, type_: types.Int64, **kw):
        return "Int64"

    def visit_int32(self, type_: types.Int32, **kw):
        return "Int32"

    def visit_int16(self, type_: types.Int16, **kw):
        return "Int16"

    def visit_int8(self, type_: types.Int8, **kw):
        return "Int8"

    def visit_INTEGER(self, type_: sa.INTEGER, **kw):
        return "Int64"

    def visit_NUMERIC(self, type_: sa.Numeric, **kw):
        # Default to (22, 9) like visit_DECIMAL and get_ydb_type, so an
        # unspecified precision/scale does not render "Decimal(None, None)".
        precision = getattr(type_, "precision", None) or 22
        scale = getattr(type_, "scale", None) or 9
        return f"Decimal({precision}, {scale})"

    def visit_DECIMAL(self, type_: sa.DECIMAL, **kw):
        precision = getattr(type_, "precision", None) or 22
        scale = getattr(type_, "scale", None) or 9
        return f"Decimal({precision}, {scale})"

    def visit_BINARY(self, type_: sa.BINARY, **kw):
        return "String"

    def visit_BLOB(self, type_: sa.BLOB, **kw):
        return "String"

    def visit_datetime(self, type_: sa.TIMESTAMP, **kw):
        return self.visit_TIMESTAMP(type_, **kw)

    def visit_DATETIME(self, type_: sa.DATETIME, **kw):
        return "DateTime"

    def visit_TIMESTAMP(self, type_: sa.TIMESTAMP, **kw):
        return "Timestamp"

    def visit_date32(self, type_: types.YqlDate32, **kw):
        return "Date32"

    def visit_timestamp64(self, type_: types.YqlTimestamp64, **kw):
        return "Timestamp64"

    def visit_datetime64(self, type_: types.YqlDateTime64, **kw):
        return "DateTime64"

    def visit_list_type(self, type_: types.ListType, **kw):
        inner = self.process(type_.item_type, **kw)
        return f"List<{inner}>"

    def visit_ARRAY(self, type_: sa.ARRAY, **kw):
        inner = self.process(type_.item_type, **kw)
        return f"List<{inner}>"

    def visit_struct_type(self, type_: types.StructType, **kw):
        # fields_types is a mapping of field name -> SA type (get_ydb_type
        # below iterates .items() and instantiates bare type classes).  The
        # previous code iterated the mapping directly, which yields only keys
        # and fails to unpack, and omitted the separator between fields.
        rendered_fields = []
        for field, field_type in type_.fields_types.items():
            if isinstance(field_type, type):
                field_type = field_type()
            rendered_fields.append(f"{field}:{self.process(field_type, **kw)}")
        return "Struct<" + ",".join(rendered_fields) + ">"

    def get_ydb_type(
        self, type_: sa.types.TypeEngine, is_optional: bool
    ) -> Union[ydb.PrimitiveType, ydb.AbstractTypeBuilder]:
        """Map a SQLAlchemy type to the ydb driver type, optionally Optional-wrapped."""
        if isinstance(type_, sa.TypeDecorator):
            type_ = type_.impl

        if isinstance(type_, (sa.Text, sa.String)):
            ydb_type = ydb.PrimitiveType.Utf8

        # Integers
        elif isinstance(type_, types.UInt64):
            ydb_type = ydb.PrimitiveType.Uint64
        elif isinstance(type_, types.UInt32):
            ydb_type = ydb.PrimitiveType.Uint32
        elif isinstance(type_, types.UInt16):
            ydb_type = ydb.PrimitiveType.Uint16
        elif isinstance(type_, types.UInt8):
            ydb_type = ydb.PrimitiveType.Uint8
        elif isinstance(type_, types.Int64):
            ydb_type = ydb.PrimitiveType.Int64
        elif isinstance(type_, types.Int32):
            ydb_type = ydb.PrimitiveType.Int32
        elif isinstance(type_, types.Int16):
            ydb_type = ydb.PrimitiveType.Int16
        elif isinstance(type_, types.Int8):
            ydb_type = ydb.PrimitiveType.Int8
        elif isinstance(type_, sa.Integer):
            ydb_type = ydb.PrimitiveType.Int64
        # Integers

        # Json
        elif isinstance(type_, sa.JSON):
            ydb_type = ydb.PrimitiveType.Json
        elif isinstance(type_, sa.JSON.JSONStrIndexType):
            ydb_type = ydb.PrimitiveType.Utf8
        elif isinstance(type_, sa.JSON.JSONIntIndexType):
            ydb_type = ydb.PrimitiveType.Int64
        elif isinstance(type_, sa.JSON.JSONPathType):
            ydb_type = ydb.PrimitiveType.Utf8
        elif isinstance(type_, types.YqlJSON):
            ydb_type = ydb.PrimitiveType.Json
        elif isinstance(type_, types.YqlJSON.YqlJSONPathType):
            ydb_type = ydb.PrimitiveType.Utf8
        # Json
        elif isinstance(type_, types.YqlDate32):
            ydb_type = ydb.PrimitiveType.Date32
        elif isinstance(type_, types.YqlTimestamp64):
            ydb_type = ydb.PrimitiveType.Timestamp64
        elif isinstance(type_, types.YqlDateTime64):
            ydb_type = ydb.PrimitiveType.Datetime64
        elif isinstance(type_, sa.DATETIME):
            ydb_type = ydb.PrimitiveType.Datetime
        elif isinstance(type_, sa.TIMESTAMP):
            ydb_type = ydb.PrimitiveType.Timestamp
        elif isinstance(type_, sa.DateTime):
            # Generic DateTime maps to Timestamp (microsecond precision).
            ydb_type = ydb.PrimitiveType.Timestamp
        elif isinstance(type_, sa.Date):
            ydb_type = ydb.PrimitiveType.Date
        elif isinstance(type_, sa.BINARY):
            ydb_type = ydb.PrimitiveType.String
        elif isinstance(type_, sa.Float):
            ydb_type = ydb.PrimitiveType.Float
        elif isinstance(type_, sa.Boolean):
            ydb_type = ydb.PrimitiveType.Bool
        elif isinstance(type_, sa.Numeric):
            precision = getattr(type_, "precision", None) or 22
            scale = getattr(type_, "scale", None) or 9
            ydb_type = ydb.DecimalType(precision, scale)
        elif isinstance(type_, (types.ListType, sa.ARRAY)):
            ydb_type = ydb.ListType(self.get_ydb_type(type_.item_type, is_optional=False))
        elif isinstance(type_, sa.TupleType):
            ydb_type = ydb.TupleType()
            for item_type in type_.types:
                ydb_type.add_element(self.get_ydb_type(item_type, is_optional=False))
        elif isinstance(type_, types.StructType):
            ydb_type = ydb.StructType()
            for field, field_type in type_.fields_types.items():
                ydb_type.add_member(field, self.get_ydb_type(field_type(), is_optional=False))
        else:
            raise NotSupportedError(f"{type_} bind variables not supported")

        if is_optional:
            return ydb.OptionalType(ydb_type)

        return ydb_type
246 |
247 |
class BaseYqlCompiler(StrSQLCompiler):
    """Statement (query/DML) compiler shared by the YQL dialect variants."""

    compound_keywords = COMPOUND_KEYWORDS
    _type_compiler_cls = BaseYqlTypeCompiler

    def get_from_hint_text(self, table, text):
        # Hints are emitted verbatim.
        return text

    def group_by_clause(self, select, **kw):
        # Hack to ensure it is possible to define labels in groupby.
        kw.update(within_columns_clause=True)
        return super(BaseYqlCompiler, self).group_by_clause(select, **kw)

    def limit_clause(self, select, **kw):
        """Render LIMIT/OFFSET, casting values to an unsigned integer type."""
        text = ""
        if select._limit_clause is not None:
            limit_clause = self._maybe_cast(
                select._limit_clause, types.UInt64, skip_types=(types.UInt64, types.UInt32, types.UInt16, types.UInt8)
            )
            text += "\n LIMIT " + self.process(limit_clause, **kw)
        if select._offset_clause is not None:
            offset_clause = self._maybe_cast(
                select._offset_clause, types.UInt64, skip_types=(types.UInt64, types.UInt32, types.UInt16, types.UInt8)
            )
            if select._limit_clause is None:
                text += "\n LIMIT 1000"  # For some reason, YDB do not support LIMIT NULL OFFSET
            text += " OFFSET " + self.process(offset_clause, **kw)
        return text

    def render_literal_value(self, value, type_):
        # Strings are escaped via STR_QUOTE_MAP and single-quoted.
        if isinstance(value, str):
            value = "".join(STR_QUOTE_MAP.get(x, x) for x in value)
            return f"'{value}'"
        return super().render_literal_value(value, type_)

    def visit_parametrized_function(self, func, **kwargs):
        """Render a '::'-namespaced function with its parameter expression."""
        name = func.name
        name_parts = []
        for name in name.split("::"):
            fname = (
                self.preparer.quote(name)
                if self.preparer._requires_quotes_illegal_chars(name) or isinstance(name, sa.sql.elements.quoted_name)
                else name
            )

            name_parts.append(fname)

        name = "::".join(name_parts)
        params = func.params_expr._compiler_dispatch(self, **kwargs)
        args = self.function_argspec(func, **kwargs)
        return "%(name)s%(params)s%(args)s" % dict(name=name, params=params, args=args)

    def visit_function(self, func, add_to_result_map=None, **kwargs):
        # Copypaste of `sa.sql.compiler.SQLCompiler.visit_function` with
        # `::` as namespace separator instead of `.`
        if add_to_result_map:
            add_to_result_map(func.name, func.name, (), func.type)

        disp = getattr(self, f"visit_{func.name.lower()}_func", None)
        if disp:
            return disp(func, **kwargs)

        name = sa.sql.compiler.FUNCTIONS.get(func.__class__)
        if name:
            if func._has_args:
                name += "%(expr)s"
        else:
            name = func.name
            name = (
                self.preparer.quote(name)
                if self.preparer._requires_quotes_illegal_chars(name) or isinstance(name, sa.sql.elements.quoted_name)
                else name
            )
            name += "%(expr)s"

        return "::".join(
            [
                (
                    self.preparer.quote(tok)
                    if self.preparer._requires_quotes_illegal_chars(tok)
                    or isinstance(name, sa.sql.elements.quoted_name)
                    else tok
                )
                for tok in func.packagenames
            ]
            + [name]
        ) % {"expr": self.function_argspec(func, **kwargs)}

    def visit_concat_func(self, func, **kwargs):
        # YQL uses the || operator for string concatenation.
        arg_sql = " || ".join(self.process(arg, **kwargs) for arg in func.clauses)
        return arg_sql

    def _is_bound_to_nullable_column(self, bind_name: str) -> bool:
        # True when the bind targets a nullable, non-PK column of the DML table.
        if bind_name in self.column_keys and hasattr(self.compile_state, "dml_table"):
            if bind_name in self.compile_state.dml_table.c:
                column = self.compile_state.dml_table.c[bind_name]
                return column.nullable and not column.primary_key
        return False

    def _guess_bound_variable_type_by_parameters(
        self, bind, post_compile_bind_values: list
    ) -> Optional[sa.types.TypeEngine]:
        """Infer a bind's type, falling back to the first non-NULL value's type."""
        bind_type = bind.type
        if bind.expanding or (isinstance(bind.type, sa.types.NullType) and post_compile_bind_values):
            not_null_values = [v for v in post_compile_bind_values if v is not None]
            if not_null_values:
                bind_type = _bindparam("", not_null_values[0]).type

        if isinstance(bind_type, sa.types.NullType):
            # No usable type information.
            return None

        return bind_type

    def _get_expanding_bind_names(self, bind_name: str, parameters_values: Mapping[str, List[Any]]) -> List[Any]:
        # Expanding ("IN") binds are post-compiled to `<name>_<i>` parameters;
        # collect every parameter name derived from this bind.
        expanding_bind_names = []
        for parameter_name in parameters_values:
            parameter_bind_name = "_".join(parameter_name.split("_")[:-1])
            if parameter_bind_name == bind_name:
                expanding_bind_names.append(parameter_name)
        return expanding_bind_names

    def render_bind_cast(self, type_, dbapi_type, sqltext):
        # Bind casts are not rendered for YQL.
        pass

    def get_bind_types(
        self, post_compile_parameters: Optional[Union[Sequence[Mapping[str, Any]], Mapping[str, Any]]]
    ) -> Dict[str, Union[ydb.PrimitiveType, ydb.AbstractTypeBuilder]]:
        """
        This method extracts information about bound variables from the table definition and parameters.
        """
        if isinstance(post_compile_parameters, collections.abc.Mapping):
            post_compile_parameters = [post_compile_parameters]

        # Gather every value seen for each post-compile parameter name.
        parameters_values = collections.defaultdict(list)
        for parameters_entry in post_compile_parameters:
            for parameter_name, parameter_value in parameters_entry.items():
                parameters_values[parameter_name].append(parameter_value)

        parameter_types = {}
        for bind_name in self.bind_names.values():
            bind = self.binds[bind_name]

            if bind.literal_execute:
                continue

            if not bind.expanding:
                post_compile_bind_names = [bind_name]
                post_compile_bind_values = parameters_values[bind_name]
            else:
                post_compile_bind_names = self._get_expanding_bind_names(bind_name, parameters_values)
                post_compile_bind_values = []
                for parameter_name, parameter_values in parameters_values.items():
                    if parameter_name in post_compile_bind_names:
                        post_compile_bind_values.extend(parameter_values)

            # A bind is Optional when its column is nullable or a NULL value occurs.
            is_optional = self._is_bound_to_nullable_column(bind_name)
            if not post_compile_bind_values or None in post_compile_bind_values:
                is_optional = True

            bind_type = self._guess_bound_variable_type_by_parameters(bind, post_compile_bind_values)

            if bind_type:
                for post_compile_bind_name in post_compile_bind_names:
                    parameter_types[post_compile_bind_name] = self._type_compiler_cls(self.dialect).get_ydb_type(
                        bind_type, is_optional
                    )

        return parameter_types

    def _maybe_cast(
        self,
        element: Any,
        cast_to: Type[sa.types.TypeEngine],
        skip_types: Optional[Tuple[Type[sa.types.TypeEngine], ...]] = None,
    ) -> Any:
        """Wrap ``element`` in CAST(.. AS cast_to) unless it already has a skip type."""
        if not skip_types:
            skip_types = (cast_to,)
        if cast_to not in skip_types:
            skip_types = (*skip_types, cast_to)
        if not hasattr(element, "type") or not isinstance(element.type, skip_types):
            return _cast(element, cast_to)
        return element
429 |
430 |
class BaseYqlDDLCompiler(DDLCompiler):
    """DDL compiler producing YQL CREATE/ALTER/DROP statements."""

    def visit_create_index(self, create: ddl.CreateIndex, **kw) -> str:
        """Render index creation as ``ALTER TABLE ... ADD INDEX ...``."""
        index: sa.Index = create.element
        ydb_opts = index.dialect_options.get("ydb", {})

        self._verify_index_table(index)

        if index.name is None:
            raise CompileError("ADD INDEX requires that the index has a name")

        formatted_table = self.preparer.format_table(index.table)
        formatted_index = self._prepared_index_name(index)
        sync_mode = "ASYNC" if ydb_opts.get("async", False) else "SYNC"

        indexed_columns = ", ".join(self.preparer.format_column(col) for col in index.columns.values())

        covered = [
            col if isinstance(col, str) else self.preparer.format_column(col) for col in ydb_opts.get("cover", [])
        ]
        covered = list(dict.fromkeys(covered))  # de-duplicate, keeping order

        text = f"ALTER TABLE {formatted_table} ADD INDEX {formatted_index} GLOBAL {sync_mode} ON ({indexed_columns})"
        if covered:
            text += " COVER (" + ", ".join(covered) + ")"
        return text

    def visit_drop_index(self, drop: ddl.DropIndex, **kw) -> str:
        """Render index removal as ``ALTER TABLE ... DROP INDEX ...``."""
        index: sa.Index = drop.element
        self._verify_index_table(index)
        formatted_table = self.preparer.format_table(index.table)
        formatted_index = self._prepared_index_name(index)
        return f"ALTER TABLE {formatted_table} DROP INDEX {formatted_index}"

    def post_create_table(self, table: sa.Table) -> str:
        """Append a ``WITH (...)`` clause carrying YDB partitioning settings."""
        settings = self._render_table_partitioning_settings(table.dialect_options["ydb"])
        if not settings:
            return ""
        return "\nWITH (\n\t" + ",\n".join(settings) + "\n)"

    def _render_table_partitioning_settings(self, ydb_opts: Dict[str, Any]) -> List[str]:
        """Turn ydb dialect options into ``NAME = value`` WITH-clause entries."""
        settings = []
        # Boolean toggles render as ENABLED/DISABLED.
        for opt_key, clause in (
            ("auto_partitioning_by_size", "AUTO_PARTITIONING_BY_SIZE"),
            ("auto_partitioning_by_load", "AUTO_PARTITIONING_BY_LOAD"),
        ):
            if ydb_opts[opt_key] is not None:
                state = "ENABLED" if ydb_opts[opt_key] else "DISABLED"
                settings.append(f"{clause} = {state}")
        # Plain-valued options render verbatim.
        for opt_key, clause in (
            ("auto_partitioning_partition_size_mb", "AUTO_PARTITIONING_PARTITION_SIZE_MB"),
            ("auto_partitioning_min_partitions_count", "AUTO_PARTITIONING_MIN_PARTITIONS_COUNT"),
            ("auto_partitioning_max_partitions_count", "AUTO_PARTITIONING_MAX_PARTITIONS_COUNT"),
            ("uniform_partitions", "UNIFORM_PARTITIONS"),
            ("partition_at_keys", "PARTITION_AT_KEYS"),
        ):
            if ydb_opts[opt_key] is not None:
                settings.append(f"{clause} = {ydb_opts[opt_key]}")
        return settings
504 |
505 |
class BaseYqlIdentifierPreparer(IdentifierPreparer):
    """Identifier preparer using YQL's backtick quoting."""

    def __init__(self, dialect):
        super().__init__(
            dialect,
            initial_quote="`",
            final_quote="`",
        )

    def format_index(self, index: sa.Index) -> str:
        # '/' is not allowed in index names; replace it after formatting.
        formatted = super().format_index(index)
        return formatted.replace("/", "_")
516 |
--------------------------------------------------------------------------------
/test/test_suite.py:
--------------------------------------------------------------------------------
1 | import ctypes
2 | import datetime
3 | import decimal
4 |
5 | import pytest
6 | import sqlalchemy as sa
7 | import sqlalchemy.testing.suite.test_types
8 | from sqlalchemy.testing import is_false, is_true
9 | from sqlalchemy.testing.suite import * # noqa: F401, F403
10 | from sqlalchemy.testing.suite import (
11 | Column,
12 | Integer,
13 | MetaData,
14 | String,
15 | Table,
16 | column,
17 | config,
18 | eq_,
19 | exists,
20 | fixtures,
21 | func,
22 | inspect,
23 | literal_column,
24 | provide_metadata,
25 | requirements,
26 | select,
27 | testing,
28 | union,
29 | )
30 | from sqlalchemy.testing.suite.test_ddl import (
31 | LongNameBlowoutTest as _LongNameBlowoutTest,
32 | )
33 | from sqlalchemy.testing.suite.test_dialect import (
34 | DifficultParametersTest as _DifficultParametersTest,
35 | )
36 | from sqlalchemy.testing.suite.test_dialect import EscapingTest as _EscapingTest
37 | from sqlalchemy.testing.suite.test_insert import (
38 | InsertBehaviorTest as _InsertBehaviorTest,
39 | )
40 | from sqlalchemy.testing.suite.test_reflection import (
41 | ComponentReflectionTest as _ComponentReflectionTest,
42 | )
43 | from sqlalchemy.testing.suite.test_reflection import (
44 | ComponentReflectionTestExtra as _ComponentReflectionTestExtra,
45 | )
46 | from sqlalchemy.testing.suite.test_reflection import (
47 | CompositeKeyReflectionTest as _CompositeKeyReflectionTest,
48 | )
49 | from sqlalchemy.testing.suite.test_reflection import HasIndexTest as _HasIndexTest
50 | from sqlalchemy.testing.suite.test_reflection import HasTableTest as _HasTableTest
51 | from sqlalchemy.testing.suite.test_reflection import (
52 | QuotedNameArgumentTest as _QuotedNameArgumentTest,
53 | )
54 | from sqlalchemy.testing.suite.test_results import RowFetchTest as _RowFetchTest
55 | from sqlalchemy.testing.suite.test_select import ExistsTest as _ExistsTest
56 | from sqlalchemy.testing.suite.test_select import (
57 | FetchLimitOffsetTest as _FetchLimitOffsetTest,
58 | )
59 | from sqlalchemy.testing.suite.test_select import JoinTest as _JoinTest
60 | from sqlalchemy.testing.suite.test_select import LikeFunctionsTest as _LikeFunctionsTest
61 | from sqlalchemy.testing.suite.test_select import OrderByLabelTest as _OrderByLabelTest
62 | from sqlalchemy.testing.suite.test_types import BinaryTest as _BinaryTest
63 | from sqlalchemy.testing.suite.test_types import DateTest as _DateTest
64 | from sqlalchemy.testing.suite.test_types import (
65 | DateTimeCoercedToDateTimeTest as _DateTimeCoercedToDateTimeTest,
66 | )
67 | from sqlalchemy.testing.suite.test_types import (
68 | DateTimeMicrosecondsTest as _DateTimeMicrosecondsTest,
69 | )
70 | from sqlalchemy.testing.suite.test_types import DateTimeTest as _DateTimeTest
71 | from sqlalchemy.testing.suite.test_types import IntegerTest as _IntegerTest
72 | from sqlalchemy.testing.suite.test_types import JSONTest as _JSONTest
73 |
74 | from sqlalchemy.testing.suite.test_types import NumericTest as _NumericTest
75 | from sqlalchemy.testing.suite.test_types import StringTest as _StringTest
76 | from sqlalchemy.testing.suite.test_types import (
77 | TimeMicrosecondsTest as _TimeMicrosecondsTest,
78 | )
79 | from sqlalchemy.testing.suite.test_types import (
80 | TimestampMicrosecondsTest as _TimestampMicrosecondsTest,
81 | )
82 | from sqlalchemy.testing.suite.test_types import TimeTest as _TimeTest
83 |
84 | from ydb_sqlalchemy.sqlalchemy import types as ydb_sa_types
85 |
# Monkeypatch the type-test suite's Column factory: YDB tables require a
# primary key (see the "table without pk unsupported" notes below), so the
# canonical "x" column used throughout the suite is promoted to primary key.
test_types_suite = sqlalchemy.testing.suite.test_types
col_creator = test_types_suite.Column


# True when running against SQLAlchemy 1.x.
OLD_SA = sa.__version__ < "2."


def column_getter(*args, **kwargs):
    # Delegate to the original Column factory, then force pk on "x".
    col = col_creator(*args, **kwargs)
    if col.name == "x":
        col.primary_key = True
    return col


test_types_suite.Column = column_getter
101 |
102 |
class ComponentReflectionTest(_ComponentReflectionTest):
    """Reflection suite adapted to YDB's schema restrictions."""

    def _check_list(self, result, exp, req_keys=None, msg=None):
        # Swallow the expected mismatches caused by the YDB-specific table
        # definitions below (nullable flags and primary-key layout differ).
        try:
            return super()._check_list(result, exp, req_keys, msg)
        except AssertionError as err:
            err_info = err.args[0]
            if "nullable" in err_info:
                return "We changed nullable in define_reflected_tables method so won't check it."
            if "constrained_columns" in err_info and "contains one more item: 'data'" in err_info:
                return "We changed primary_keys in define_reflected_tables method so this will fail"
            raise

    @classmethod
    def define_reflected_tables(cls, metadata, schema):
        # Same tables as upstream, but without foreign keys and with primary
        # keys everywhere, to satisfy YDB's table requirements.
        Table(
            "users",
            metadata,
            Column("user_id", sa.INT, primary_key=True),
            Column("test1", sa.CHAR(5)),
            Column("test2", sa.Float()),
            Column("parent_user_id", sa.Integer),
            schema=schema,
            test_needs_fk=True,
        )

        Table(
            "dingalings",
            metadata,
            Column("dingaling_id", sa.Integer, primary_key=True),
            Column("address_id", sa.Integer),
            Column("id_user", sa.Integer),
            Column("data", sa.String(30)),
            schema=schema,
            test_needs_fk=True,
        )

        Table(
            "email_addresses",
            metadata,
            Column("address_id", sa.Integer, primary_key=True),
            Column("remote_user_id", sa.Integer),
            Column("email_address", sa.String(20)),
            schema=schema,
            test_needs_fk=True,
        )

        Table(
            "comment_test",
            metadata,
            Column("id", sa.Integer, primary_key=True, comment="id comment"),
            Column("data", sa.String(20), comment="data % comment"),
            Column("d2", sa.String(20), comment=r"""Comment types type speedily ' " \ '' Fun!"""),
            schema=schema,
            comment=r"""the test % ' " \ table comment""",
        )

        Table(
            "no_constraints",
            metadata,
            Column("data", sa.String(20), primary_key=True, nullable=True),
            schema=schema,
        )

    @pytest.mark.skip("views unsupported")
    def test_get_view_names(self, connection, use_schema):
        pass

    def test_metadata(self, connection, **kwargs):
        # Reflect everything and compare with the inspector's table list.
        m = MetaData()
        m.reflect(connection, resolve_fks=False)

        insp = inspect(connection)
        tables = insp.get_table_names()
        eq_(sorted(m.tables), sorted(tables))
178 |
class CompositeKeyReflectionTest(_CompositeKeyReflectionTest):
    """Composite-PK reflection with an unnamed constraint (named PKs unsupported)."""

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "tb1",
            metadata,
            Column("id", Integer),
            Column("attr", Integer),
            Column("name", sa.VARCHAR(20)),
            # named pk unsupported
            sa.PrimaryKeyConstraint("name", "id", "attr"),
            schema=None,
            test_needs_fk=True,
        )
193 |
194 |
class ComponentReflectionTestExtra(_ComponentReflectionTestExtra):
    """Extra reflection checks adjusted for YDB's type and PK restrictions."""

    def _type_round_trip(self, connection, metadata, *types):
        # Create a throwaway table and reflect back each column's type.
        t = Table(
            "t",
            metadata,
            # table without pk unsupported
            *[Column("t%d" % i, type_, primary_key=True) for i, type_ in enumerate(types)],
        )
        t.create(connection)
        return [c["type"] for c in inspect(connection).get_columns("t")]

    @pytest.mark.skip("YDB: Only Decimal(22,9) is supported for table columns")
    def test_numeric_reflection(self):
        pass

    @pytest.mark.skip("TODO: varchar with length unsupported")
    def test_varchar_reflection(self):
        pass

    @testing.requires.table_reflection
    def test_nullable_reflection(self, connection, metadata):
        # Both columns are PK members (YDB requires a pk), but nullability
        # must still round-trip through reflection.
        t = Table(
            "t",
            metadata,
            # table without pk unsupported
            Column("a", Integer, nullable=True, primary_key=True),
            Column("b", Integer, nullable=False, primary_key=True),
        )
        t.create(connection)
        eq_(
            {col["name"]: col["nullable"] for col in inspect(connection).get_columns("t")},
            {"a": True, "b": False},
        )
228 |
229 |
class HasTableTest(_HasTableTest):
    """has_table checks; tables get an explicit primary key for YDB."""

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "test_table",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("data", String(50)),
        )

    def test_has_table_cache(self, metadata):
        # The inspector caches has_table results until clear_cache() is called.
        insp = inspect(config.db)
        is_true(insp.has_table("test_table"))
        # table without pk unsupported
        nt = Table("new_table", metadata, Column("col", Integer, primary_key=True))
        is_false(insp.has_table("new_table"))
        nt.create(config.db)
        try:
            is_false(insp.has_table("new_table"))
            insp.clear_cache()
            is_true(insp.has_table("new_table"))
        finally:
            nt.drop(config.db)
253 |
254 |
# YDB adds indexes via ALTER TABLE ... ADD INDEX, not CREATE INDEX.
@pytest.mark.skip("CREATE INDEX syntax unsupported")
class HasIndexTest(_HasIndexTest):
    pass
258 |
259 |
# Quoted/special-character table names are not accepted by YDB.
@pytest.mark.skip("quotes unsupported in table names")
class QuotedNameArgumentTest(_QuotedNameArgumentTest):
    pass
263 |
264 |
class IntegerTest(_IntegerTest):
    # The upstream test issues a FROM-less SELECT ... WHERE, which YQL rejects.
    @pytest.mark.skip("YQL doesn't support select with where without from")
    def test_huge_int_auto_accommodation(self, connection, intvalue):
        pass
269 |
270 |
@pytest.mark.skip("Use YdbDecimalTest for Decimal type testing")
class NumericTest(_NumericTest):
    # SqlAlchemy maybe eat Decimal and throw Double
    # Decimal coverage lives in DecimalTest below instead.
    pass
275 |
276 |
@pytest.mark.skip("TODO: see issue #13")
class BinaryTest(_BinaryTest):
    # Skipped wholesale pending issue #13 (binary type support).
    pass
280 |
281 |
if not OLD_SA:
    from sqlalchemy.testing.suite.test_types import TrueDivTest as _TrueDivTest

    class TrueDivTest(_TrueDivTest):
        """Division tests, restricted to the operations YQL actually supports."""

        @pytest.mark.skip("Unsupported builtin: FLOOR")
        def test_floordiv_numeric(self, connection, left, right, expected):
            pass

        @pytest.mark.skip("Truediv unsupported for int")
        def test_truediv_integer(self, connection, left, right, expected):
            pass

        @pytest.mark.skip("Truediv unsupported for int")
        def test_truediv_integer_bound(self, connection):
            pass

        @pytest.mark.skip("Numeric is not Decimal")
        def test_truediv_numeric(self):
            # SqlAlchemy maybe eat Decimal and throw Double
            pass

        @testing.combinations(("6.25", "2.5", 2.5), argnames="left, right, expected")
        def test_truediv_float(self, connection, left, right, expected):
            # Float / Float division is supported; build both operands as
            # typed literal columns and compare the scalar result.
            dividend = literal_column(left, type_=sa.Float())
            divisor = literal_column(right, type_=sa.Float())
            eq_(connection.scalar(select(dividend / divisor)), expected)
311 |
312 |
class ExistsTest(_ExistsTest):
    """
    YDB says: Filtering is not allowed without FROM so rewrite queries
    """

    def test_select_exists(self, connection):
        stuff = self.tables.stuff
        query = select(exists().where(stuff.c.data == "some data"))
        eq_(connection.execute(query).fetchall(), [(True,)])

    def test_select_exists_false(self, connection):
        stuff = self.tables.stuff
        query = select(exists().where(stuff.c.data == "no data"))
        eq_(connection.execute(query).fetchall(), [(False,)])
325 |
326 |
class LikeFunctionsTest(_LikeFunctionsTest):
    @testing.requires.regexp_match
    def test_not_regexp_match(self):
        """Negated regexp match; YDB also returns rows whose column is NULL."""
        data_column = self.tables.some_table.c.data
        expected_ids = {2, 3, 4, 7, 8, 10, 11}
        self._test(~data_column.regexp_match("a.cde"), expected_ids)
333 |
334 |
class EscapingTest(_EscapingTest):
    @provide_metadata
    def test_percent_sign_round_trip(self):
        """test that the DBAPI accommodates for escaped / nonescaped
        percent signs in a way that matches the compiler

        """
        metadata = self.metadata
        # YDB does not support tables without a primary key.
        table = Table("t", metadata, Column("data", String(50), primary_key=True))
        table.create(config.db)
        with config.db.begin() as conn:
            conn.execute(table.insert(), {"data": "some % value"})
            conn.execute(table.insert(), {"data": "some %% other value"})

            single_percent = conn.scalar(
                select(table.c.data).where(table.c.data == literal_column("'some % value'"))
            )
            eq_(single_percent, "some % value")

            double_percent = conn.scalar(
                select(table.c.data).where(table.c.data == literal_column("'some %% other value'"))
            )
            eq_(double_percent, "some %% other value")
356 |
357 |
@pytest.mark.skip("unsupported tricky names for columns")
class DifficultParametersTest(_DifficultParametersTest):
    # Skipped wholesale: the upstream test uses column names with characters
    # YDB does not accept.
    pass
361 |
362 |
@pytest.mark.skip("JOIN ON expression must be a conjunction of equality predicates")
class JoinTest(_JoinTest):
    # Skipped wholesale: YDB only allows equi-join conditions in JOIN ... ON.
    pass
366 |
367 |
class OrderByLabelTest(_OrderByLabelTest):
    def test_composed_multiple(self):
        """Order by two labeled composite expressions.

        The upstream version wraps one expression in lower(), which is an
        unknown builtin in YQL, so plain string concatenation is used instead.
        """
        table = self.tables.some_table
        sum_label = (table.c.x + table.c.y).label("lx")
        concat_label = (table.c.q + table.c.p).label("ly")
        expected = [(3, "q1p3"), (5, "q2p2"), (7, "q3p1")]
        query = select(sum_label, concat_label).order_by(sum_label, concat_label.desc())
        self._assert_result(query, expected)

    @testing.requires.group_by_complex_expression
    def test_group_by_composed(self):
        """
        YDB says: column `some_table.x` must either be a key column in GROUP BY
        or it should be used in aggregation function
        """
        table = self.tables.some_table
        expr = (table.c.x + table.c.y).label("lx")
        grouped = select(func.count(table.c.id), column("lx")).group_by(expr)
        stmt = grouped.order_by(column("lx"))
        self._assert_result(stmt, [(1, 3), (1, 5), (1, 7)])
388 |
389 |
class FetchLimitOffsetTest(_FetchLimitOffsetTest):
    def test_limit_render_multiple_times(self, connection):
        """
        YQL does not support scalar subquery, so test was refiled with simple subquery
        """
        table = self.tables.some_table
        limited = select(table.c.id).limit(1).subquery()
        unioned = union(select(limited), select(limited)).subquery().select()
        expected = [(1,), (1,)]
        self._assert_result(connection, unioned, expected)
408 |
409 |
class InsertBehaviorTest(_InsertBehaviorTest):
    @pytest.mark.skip("autoincrement unsupported")
    def test_insert_from_select_autoinc(self, connection):
        # Skipped: YDB has no autoincrement primary keys.
        pass

    @pytest.mark.skip("autoincrement unsupported")
    def test_insert_from_select_autoinc_no_rows(self, connection):
        # Skipped: YDB has no autoincrement primary keys.
        pass

    @pytest.mark.skip("implicit PK values unsupported")
    def test_no_results_for_non_returning_insert(self, connection):
        # Skipped: inserts must always supply primary key values explicitly.
        pass
422 |
423 |
class DateTest(_DateTest):
    # Dispose the bind once per class rather than per test.
    run_dispose_bind = "once"
426 |
427 |
class Date32Test(_DateTest):
    # Reuse the upstream Date suite against YDB's Date32 type.
    run_dispose_bind = "once"
    datatype = ydb_sa_types.YqlDate32
    # Pre-1970 value — presumably chosen to exercise the extended range
    # Date32 offers beyond the plain DATE type.
    data = datetime.date(1969, 1, 1)

    @pytest.mark.skip("Default binding for DATE is not compatible with Date32")
    def test_select_direct(self, connection):
        pass
436 |
437 |
class DateTimeMicrosecondsTest(_DateTimeMicrosecondsTest):
    # Dispose the bind once per class rather than per test.
    run_dispose_bind = "once"
440 |
441 |
class DateTimeTest(_DateTimeTest):
    # Dispose the bind once per class rather than per test.
    run_dispose_bind = "once"
444 |
445 |
class DateTime64Test(_DateTimeTest):
    # Reuse the upstream DateTime suite against YDB's DateTime64 type.
    datatype = ydb_sa_types.YqlDateTime64
    # Pre-1970 value — presumably chosen to exercise the extended range
    # DateTime64 offers beyond the plain DATETIME type.
    data = datetime.datetime(1969, 10, 15, 12, 57, 18)
    run_dispose_bind = "once"

    @pytest.mark.skip("Default binding for DATETIME is not compatible with DateTime64")
    def test_select_direct(self, connection):
        pass
454 |
455 |
class TimestampMicrosecondsTest(_TimestampMicrosecondsTest):
    # Dispose the bind once per class rather than per test.
    run_dispose_bind = "once"
458 |
459 |
class Timestamp64MicrosecondsTest(_TimestampMicrosecondsTest):
    # Reuse the upstream Timestamp suite against YDB's Timestamp64 type.
    run_dispose_bind = "once"
    datatype = ydb_sa_types.YqlTimestamp64
    # Pre-1970 value with microseconds — presumably chosen to exercise the
    # extended range Timestamp64 offers beyond the plain TIMESTAMP type.
    data = datetime.datetime(1969, 10, 15, 12, 57, 18, 396)

    @pytest.mark.skip("Default binding for TIMESTAMP is not compatible with Timestamp64")
    def test_select_direct(self, connection):
        pass
468 |
469 |
@pytest.mark.skip("unsupported Time data type")
class TimeTest(_TimeTest):
    # Skipped wholesale: YDB has no standalone Time column type.
    pass
473 |
474 |
class JSONTest(_JSONTest):
    @classmethod
    def define_tables(cls, metadata):
        # YDB requires a primary key, so (id, name) form a composite PK here.
        Table(
            "data_table",
            metadata,
            Column("id", Integer, primary_key=True, default=1),
            Column("name", String(30), primary_key=True, nullable=False),
            Column("data", cls.datatype, nullable=False),
            Column("nulldata", cls.datatype(none_as_null=True)),
        )

    def _json_value_insert(self, connection, datatype, value, data_element):
        needs_float_shrink = datatype == "float" and value is not None
        if needs_float_shrink:
            # Python floats are C doubles; round-trip through c_float so the
            # expected value matches single-precision storage.
            value = ctypes.c_float(value).value
        return super()._json_value_insert(connection, datatype, value, data_element)
492 |
493 |
class StringTest(_StringTest):
    @requirements.unbounded_varchar
    def test_nolength_string(self):
        """A String column with no explicit length can be created and dropped."""
        metadata = MetaData()
        # YDB does not support tables without a primary key.
        table = Table("foo", metadata, Column("one", String, primary_key=True))
        table.create(config.db)
        table.drop(config.db)
502 |
503 |
class ContainerTypesTest(fixtures.TablesTest):
    """Tests for container types (ARRAY/List, Struct, Tuple) as bind variables.

    Fix over the previous version: the tests that compare ``fetchall()``
    against an ordered list now add ``ORDER BY id`` — without it the row
    order is not guaranteed by SQL and the assertions were order-dependent.
    """

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "container_types_test",
            metadata,
            Column("id", Integer),
            sa.PrimaryKeyConstraint("id"),
            schema=None,
            test_needs_fk=True,
        )

    def test_ARRAY_bind_variable(self, connection):
        """A sa.ARRAY bindparam works as the right-hand side of IN."""
        table = self.tables.container_types_test

        connection.execute(sa.insert(table).values([{"id": 1}, {"id": 2}, {"id": 3}]))

        # ORDER BY makes the expected-row comparison deterministic.
        stmt = (
            select(table.c.id)
            .where(table.c.id.in_(sa.bindparam("id", type_=sa.ARRAY(sa.Integer))))
            .order_by(table.c.id)
        )

        eq_(connection.execute(stmt, {"id": [1, 2]}).fetchall(), [(1,), (2,)])

    def test_list_type_bind_variable(self, connection):
        """The dialect-specific ListType bindparam works with IN."""
        table = self.tables.container_types_test

        connection.execute(sa.insert(table).values([{"id": 1}, {"id": 2}, {"id": 3}]))

        # ORDER BY makes the expected-row comparison deterministic.
        stmt = (
            select(table.c.id)
            .where(table.c.id.in_(sa.bindparam("id", type_=ydb_sa_types.ListType(sa.Integer))))
            .order_by(table.c.id)
        )

        eq_(connection.execute(stmt, {"id": [1, 2]}).fetchall(), [(1,), (2,)])

    def test_struct_type_bind_variable(self, connection):
        """A StructType bindparam's member can be referenced via :param.member."""
        table = self.tables.container_types_test

        connection.execute(sa.insert(table).values([{"id": 1}, {"id": 2}, {"id": 3}]))

        # Single-row match, so no ordering is needed here.
        stmt = select(table.c.id).where(
            table.c.id
            == sa.text(":struct.id").bindparams(
                sa.bindparam("struct", type_=ydb_sa_types.StructType({"id": sa.Integer})),
            )
        )

        eq_(connection.scalar(stmt, {"struct": {"id": 1}}), 1)

    def test_struct_type_bind_variable_text(self, connection):
        """StructType members can be combined in a textual expression."""
        rs = connection.execute(
            sa.text("SELECT :struct.x + :struct.y").bindparams(
                sa.bindparam(
                    key="struct",
                    type_=ydb_sa_types.StructType({"x": sa.Integer, "y": sa.Integer}),
                    value={"x": 1, "y": 2},
                )
            )
        )
        assert rs.scalar() == 3

    def test_from_as_table(self, connection):
        """INSERT ... FROM SELECT over AS_TABLE of a List<Struct> bindparam."""
        table = self.tables.container_types_test

        connection.execute(
            sa.insert(table).from_select(
                ["id"],
                sa.select(sa.column("id")).select_from(
                    sa.func.as_table(
                        sa.bindparam(
                            "data",
                            value=[{"id": 1}, {"id": 2}, {"id": 3}],
                            type_=ydb_sa_types.ListType(ydb_sa_types.StructType({"id": sa.Integer})),
                        )
                    )
                ),
            )
        )

        # ORDER BY makes the expected-row comparison deterministic.
        eq_(
            connection.execute(sa.select(table).order_by(table.c.id)).fetchall(),
            [(1,), (2,), (3,)],
        )

    def test_tuple_list_type_bind_variable_text(self, connection):
        """A list-of-tuples bindparam works with a textual tuple IN predicate."""
        table = self.tables.container_types_test

        connection.execute(sa.insert(table).values([{"id": 1}, {"id": 2}, {"id": 3}]))

        # ORDER BY makes the expected-row comparison deterministic.
        stmt = (
            select(table.c.id)
            .where(
                sa.text("(id, id) IN :tuple_arr_var").bindparams(
                    sa.bindparam("tuple_arr_var", type_=sa.ARRAY(sa.TupleType(sa.Integer, sa.Integer))),
                )
            )
            .order_by(table.c.id)
        )

        eq_(connection.execute(stmt, {"tuple_arr_var": [(1, 1), (2, 2)]}).fetchall(), [(1,), (2,)])
592 |
593 |
class ConcatTest(fixtures.TablesTest):
    """Verify that func.concat compiles to something YDB accepts."""

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "concat_func_test",
            metadata,
            Column("A", String),
            Column("B", String),
            sa.PrimaryKeyConstraint("A"),
            schema=None,
            test_needs_fk=True,
        )

    def test_concat_func(self, connection):
        table = self.tables.concat_func_test

        connection.execute(sa.insert(table).values([{"A": "A", "B": "B"}]))

        concatenated = func.concat(table.c.A, " ", table.c.B)
        eq_(connection.scalar(select(concatenated).limit(1)), "A B")
615 |
616 |
if not OLD_SA:
    from sqlalchemy.testing.suite.test_types import NativeUUIDTest as _NativeUUIDTest

    @pytest.mark.skip("uuid unsupported for columns")
    class NativeUUIDTest(_NativeUUIDTest):
        # Skipped wholesale: UUID is not usable as a YDB column type.
        # Guarded by OLD_SA because this upstream test exists only in SA 2.x.
        pass
623 |
624 |
@pytest.mark.skip("unsupported Time data type")
class TimeMicrosecondsTest(_TimeMicrosecondsTest):
    # Skipped wholesale: YDB has no standalone Time column type.
    pass
628 |
629 |
@pytest.mark.skip("unsupported coerce dates from datetime")
class DateTimeCoercedToDateTimeTest(_DateTimeCoercedToDateTimeTest):
    # Skipped wholesale: coercing datetime values into DATE columns is
    # not supported.
    pass
633 |
634 |
@pytest.mark.skip("named constraints unsupported")
class LongNameBlowoutTest(_LongNameBlowoutTest):
    # Skipped wholesale: YDB does not support named constraints.
    pass
638 |
639 |
class RowFetchTest(_RowFetchTest):
    @pytest.mark.skip("scalar subquery unsupported")
    def test_row_w_scalar_select(self, connection):
        # Skipped: YQL does not support scalar subqueries in a select list.
        pass
644 |
645 |
class DecimalTest(fixtures.TablesTest):
    """Tests for YDB Decimal type using standard sa.DECIMAL"""

    # NOTE: all methods share one table; each method uses its own disjoint
    # range of hard-coded ids so inserts never collide across tests.

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "decimal_test",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("decimal_default", sa.DECIMAL),  # Default: precision=22, scale=9
            Column("decimal_custom", sa.DECIMAL(precision=10, scale=2)),
            Column("decimal_as_float", sa.DECIMAL(asdecimal=False)),  # Should behave like Float
        )

    def test_decimal_basic_operations(self, connection):
        """Test basic insert and select operations with Decimal"""

        table = self.tables.decimal_test

        test_values = [
            decimal.Decimal("1"),
            decimal.Decimal("2"),
            decimal.Decimal("3"),
        ]

        # Insert test values
        for i, val in enumerate(test_values):
            connection.execute(table.insert().values(id=i + 1, decimal_default=val))

        # Select and verify round-trip type and value
        results = connection.execute(select(table.c.decimal_default).order_by(table.c.id)).fetchall()

        for i, (result,) in enumerate(results):
            expected = test_values[i]
            assert isinstance(result, decimal.Decimal)
            assert result == expected

    def test_decimal_with_precision_scale(self, connection):
        """Test Decimal with specific precision and scale"""

        table = self.tables.decimal_test

        # Test value that fits precision(10, 2)
        test_value = decimal.Decimal("12345678.99")

        connection.execute(table.insert().values(id=100, decimal_custom=test_value))

        result = connection.scalar(select(table.c.decimal_custom).where(table.c.id == 100))

        assert isinstance(result, decimal.Decimal)
        assert result == test_value

    def test_decimal_literal_rendering(self, connection):
        """Test literal rendering of Decimal values"""
        from sqlalchemy import literal

        table = self.tables.decimal_test

        # Test literal in INSERT
        test_value = decimal.Decimal("999.99")

        connection.execute(table.insert().values(id=300, decimal_default=literal(test_value, sa.DECIMAL())))

        result = connection.scalar(select(table.c.decimal_default).where(table.c.id == 300))

        assert isinstance(result, decimal.Decimal)
        assert result == test_value

    def test_decimal_overflow(self, connection):
        """Test behavior when precision is exceeded"""

        table = self.tables.decimal_test

        # Try to insert value that exceeds precision=10, scale=2
        overflow_value = decimal.Decimal("99999.99999")

        # The error may surface either on execute or on commit, so both
        # statements sit inside the raises block.
        with pytest.raises(Exception):  # Should raise some kind of database error
            connection.execute(table.insert().values(id=500, decimal_custom=overflow_value))
            connection.commit()

    def test_decimal_asdecimal_false(self, connection):
        """Test DECIMAL with asdecimal=False (should return float)"""

        table = self.tables.decimal_test

        test_value = decimal.Decimal("123.45")

        connection.execute(table.insert().values(id=600, decimal_as_float=test_value))

        result = connection.scalar(select(table.c.decimal_as_float).where(table.c.id == 600))

        assert isinstance(result, float), f"Expected float, got {type(result)}"
        assert abs(result - 123.45) < 0.01

    def test_decimal_arithmetic(self, connection):
        """Test arithmetic operations with Decimal columns"""

        table = self.tables.decimal_test

        val1 = decimal.Decimal("100.50")
        val2 = decimal.Decimal("25.25")

        connection.execute(table.insert().values(id=900, decimal_default=val1))
        connection.execute(table.insert().values(id=901, decimal_default=val2))

        # Test various arithmetic operations
        addition_result = connection.scalar(
            select(table.c.decimal_default + decimal.Decimal("10.00")).where(table.c.id == 900)
        )

        subtraction_result = connection.scalar(
            select(table.c.decimal_default - decimal.Decimal("5.25")).where(table.c.id == 900)
        )

        multiplication_result = connection.scalar(
            select(table.c.decimal_default * decimal.Decimal("2.0")).where(table.c.id == 901)
        )

        division_result = connection.scalar(
            select(table.c.decimal_default / decimal.Decimal("2.0")).where(table.c.id == 901)
        )

        # Verify results within a small tolerance rather than exactly,
        # to be robust against scale/rounding of the server-side result.
        assert abs(addition_result - decimal.Decimal("110.50")) < decimal.Decimal("0.01")
        assert abs(subtraction_result - decimal.Decimal("95.25")) < decimal.Decimal("0.01")
        assert abs(multiplication_result - decimal.Decimal("50.50")) < decimal.Decimal("0.01")
        assert abs(division_result - decimal.Decimal("12.625")) < decimal.Decimal("0.01")

    def test_decimal_comparison_operations(self, connection):
        """Test comparison operations with Decimal columns"""

        table = self.tables.decimal_test

        values = [
            decimal.Decimal("10.50"),
            decimal.Decimal("20.75"),
            decimal.Decimal("15.25"),
        ]

        for i, val in enumerate(values):
            connection.execute(table.insert().values(id=1000 + i, decimal_default=val))

        # Test various comparisons
        greater_than = connection.execute(
            select(table.c.id).where(table.c.decimal_default > decimal.Decimal("15.00")).order_by(table.c.id)
        ).fetchall()

        less_than = connection.execute(
            select(table.c.id).where(table.c.decimal_default < decimal.Decimal("15.00")).order_by(table.c.id)
        ).fetchall()

        equal_to = connection.execute(
            select(table.c.id).where(table.c.decimal_default == decimal.Decimal("15.25"))
        ).fetchall()

        between_values = connection.execute(
            select(table.c.id)
            .where(table.c.decimal_default.between(decimal.Decimal("15.00"), decimal.Decimal("21.00")))
            .order_by(table.c.id)
        ).fetchall()

        # Verify results
        assert len(greater_than) == 2  # 20.75 and 15.25
        assert len(less_than) == 1  # 10.50
        assert len(equal_to) == 1  # 15.25
        assert len(between_values) == 2  # 20.75 and 15.25

    def test_decimal_null_handling(self, connection):
        """Test NULL handling with Decimal columns"""

        table = self.tables.decimal_test

        # Insert NULL value
        connection.execute(table.insert().values(id=1100, decimal_default=None))

        # Insert non-NULL value for comparison
        connection.execute(table.insert().values(id=1101, decimal_default=decimal.Decimal("42.42")))

        # Test NULL retrieval
        null_result = connection.scalar(select(table.c.decimal_default).where(table.c.id == 1100))

        non_null_result = connection.scalar(select(table.c.decimal_default).where(table.c.id == 1101))

        assert null_result is None
        assert non_null_result == decimal.Decimal("42.42")

        # Test IS NULL / IS NOT NULL
        null_count = connection.scalar(select(func.count()).where(table.c.decimal_default.is_(None)))

        not_null_count = connection.scalar(select(func.count()).where(table.c.decimal_default.isnot(None)))

        # Should have at least 1 NULL and several non-NULL values from other tests
        # (counts are lower bounds because the table is shared across tests).
        assert null_count >= 1
        assert not_null_count >= 1

    def test_decimal_input_type_conversion(self, connection):
        """Test that bind_processor handles different input types correctly (float, string, int, Decimal)"""

        table = self.tables.decimal_test

        # Test different input types that should all be converted to Decimal
        test_cases = [
            (1400, 123.45, "float input"),  # float
            (1401, "456.78", "string input"),  # string
            (1402, decimal.Decimal("789.12"), "decimal input"),  # already Decimal
            (1403, 100, "int input"),  # int
        ]

        for test_id, input_value, description in test_cases:
            connection.execute(table.insert().values(id=test_id, decimal_default=input_value))

            result = connection.scalar(select(table.c.decimal_default).where(table.c.id == test_id))

            # All should be returned as Decimal
            assert isinstance(result, decimal.Decimal), f"Failed for {description}: got {type(result)}"

            # Verify the value is approximately correct
            # (str() round-trip avoids binary-float representation error).
            expected = decimal.Decimal(str(input_value))
            error_str = f"Failed for {description}: expected {expected}, got {result}"
            assert abs(result - expected) < decimal.Decimal("0.01"), error_str

    def test_decimal_asdecimal_comparison(self, connection):
        """Test comparison between asdecimal=True and asdecimal=False behavior"""

        table = self.tables.decimal_test

        test_value = decimal.Decimal("999.123")

        # Insert same value into both columns
        connection.execute(
            table.insert().values(
                id=1500,
                decimal_default=test_value,  # asdecimal=True (default)
                decimal_as_float=test_value,  # asdecimal=False
            )
        )

        # Get results from both columns
        result_as_decimal = connection.scalar(select(table.c.decimal_default).where(table.c.id == 1500))
        result_as_float = connection.scalar(select(table.c.decimal_as_float).where(table.c.id == 1500))

        # Check types are different
        assert isinstance(result_as_decimal, decimal.Decimal), f"Expected Decimal, got {type(result_as_decimal)}"
        assert isinstance(result_as_float, float), f"Expected float, got {type(result_as_float)}"

        # Check values are approximately equal
        assert abs(result_as_decimal - test_value) < decimal.Decimal("0.001")
        assert abs(result_as_float - float(test_value)) < 0.001

        # Check that converting between them gives same value
        assert abs(float(result_as_decimal) - result_as_float) < 0.001
897 |
--------------------------------------------------------------------------------