├── aerich ├── py.typed ├── version.py ├── __main__.py ├── enums.py ├── exceptions.py ├── models.py ├── ddl │ ├── sqlite │ │ └── __init__.py │ ├── mysql │ │ └── __init__.py │ └── postgres │ │ └── __init__.py ├── coder.py ├── inspectdb │ ├── sqlite.py │ ├── mysql.py │ ├── postgres.py │ └── __init__.py ├── _compat.py └── __init__.py ├── tests ├── __init__.py ├── assets │ ├── fake │ │ ├── db.py │ │ ├── conftest_.py │ │ ├── settings.py │ │ └── _tests.py │ ├── m2m_comment │ │ ├── db.py │ │ ├── conftest_.py │ │ ├── models.py │ │ ├── models_2.py │ │ ├── settings.py │ │ └── _tests.py │ ├── postgres_vector │ │ ├── db.py │ │ ├── pyproject.toml │ │ ├── models.py │ │ └── settings.py │ ├── table_creations │ │ ├── db.py │ │ ├── conftest_.py │ │ ├── models.py │ │ ├── settings.py │ │ ├── models_2.py │ │ └── _tests.py │ ├── class_var_config │ │ ├── app │ │ │ ├── __init__.py │ │ │ ├── core │ │ │ │ ├── __init__.py │ │ │ │ └── config.py │ │ │ └── models.py │ │ ├── conftest_.py │ │ ├── _tests.py │ │ └── pyproject.toml │ ├── drop_field_unique │ │ ├── db.py │ │ ├── conftest_.py │ │ ├── models_2.py │ │ ├── models.py │ │ ├── models_3.py │ │ ├── models_5.py │ │ ├── models_4.py │ │ ├── settings.py │ │ └── _tests.py │ ├── per_app_migrations │ │ ├── auth │ │ │ ├── __init__.py │ │ │ └── models.py │ │ ├── polls │ │ │ ├── __init__.py │ │ │ └── models.py │ │ ├── conftest_.py │ │ ├── pyproject.toml │ │ ├── settings.py │ │ ├── models_2.py │ │ ├── models_3.py │ │ └── _tests.py │ ├── remove_constraint │ │ ├── db.py │ │ ├── conftest_.py │ │ ├── models_2.py │ │ ├── models.py │ │ ├── models_3.py │ │ ├── models_4.py │ │ ├── models_5.py │ │ ├── settings.py │ │ └── _tests.py │ ├── m2m_rescursive │ │ ├── settings.py │ │ ├── conftest_.py │ │ ├── models.py │ │ ├── models_2.py │ │ └── _tests.py │ ├── sqlite_migrate │ │ ├── settings.py │ │ ├── conftest_.py │ │ ├── models.py │ │ └── _tests.py │ ├── ignore_on_delete │ │ ├── conftest_.py │ │ ├── settings.py │ │ ├── _tests.py │ │ ├── models.py │ │ └── models_2.py │ ├── migrate_no_input │ │ ├── settings.py │ │ ├── models.py │ │ └── _tests.py │ ├── sqlite_old_style │ │ ├── conftest_.py │ │ ├── settings.py │ │ ├── example_db.sqlite3 │ │ ├── models.py │ │ ├── _migrations │ │ │ └── models │ │ │ │ ├── 1_20250405163135_update.py │ │ │ │ └── 0_20250405163033_init.py │ │ ├── _tests.py │ │ └── data.json │ ├── command_programmatically │ │ ├── settings.py │ │ ├── models.py │ │ ├── models_2.py │ │ └── _tests.py │ ├── custom_index_offline │ │ ├── conftest_.py │ │ ├── settings.py │ │ ├── models.py │ │ ├── models_3.py │ │ ├── models_2.py │ │ └── _tests.py │ ├── delete_model_with_m2m_field │ │ ├── conftest_.py │ │ ├── settings.py │ │ ├── models_3.py │ │ ├── models_2.py │ │ ├── models.py │ │ └── _tests.py │ ├── missing_aerich_models │ │ ├── models.py │ │ ├── other_models.py │ │ └── settings.py │ ├── settings.py │ ├── conftest_.py │ └── db.py ├── indexes.py ├── test_command.py ├── test_python_m.py ├── test_inspectdb.py ├── test_remove_unique_constraint.py ├── models_second.py ├── test_per_app_migrations.py ├── old_models.py ├── models.py ├── test_fake.py ├── test_cli.py ├── _utils.py └── test_ddl.py ├── .github ├── FUNDING.yml └── workflows │ ├── pypi.yml │ └── ci.yml ├── Makefile ├── conftest.py ├── .gitignore ├── pyproject.toml ├── README_RU.md ├── README.md └── LICENSE /aerich/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/assets/fake/db.py: -------------------------------------------------------------------------------- 1 | ../db.py -------------------------------------------------------------------------------- /tests/assets/m2m_comment/db.py: -------------------------------------------------------------------------------- 1 | ../db.py -------------------------------------------------------------------------------- /aerich/version.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.9.2" 2 | -------------------------------------------------------------------------------- /tests/assets/postgres_vector/db.py: -------------------------------------------------------------------------------- 1 | ../db.py -------------------------------------------------------------------------------- /tests/assets/table_creations/db.py: -------------------------------------------------------------------------------- 1 | ../db.py -------------------------------------------------------------------------------- /tests/assets/class_var_config/app/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/assets/drop_field_unique/db.py: -------------------------------------------------------------------------------- 1 | ../db.py -------------------------------------------------------------------------------- /tests/assets/fake/conftest_.py: -------------------------------------------------------------------------------- 1 | ../conftest_.py -------------------------------------------------------------------------------- /tests/assets/per_app_migrations/auth/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/assets/remove_constraint/db.py: -------------------------------------------------------------------------------- 1 | ../db.py -------------------------------------------------------------------------------- /tests/assets/class_var_config/app/core/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/assets/m2m_comment/conftest_.py: -------------------------------------------------------------------------------- 1 | ../conftest_.py -------------------------------------------------------------------------------- /tests/assets/m2m_rescursive/settings.py: -------------------------------------------------------------------------------- 1 | ../settings.py -------------------------------------------------------------------------------- /tests/assets/per_app_migrations/polls/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/assets/sqlite_migrate/settings.py: -------------------------------------------------------------------------------- 1 | ../settings.py -------------------------------------------------------------------------------- /tests/assets/class_var_config/conftest_.py: -------------------------------------------------------------------------------- 1 | ../conftest_.py 
-------------------------------------------------------------------------------- /tests/assets/drop_field_unique/conftest_.py: -------------------------------------------------------------------------------- 1 | ../conftest_.py -------------------------------------------------------------------------------- /tests/assets/ignore_on_delete/conftest_.py: -------------------------------------------------------------------------------- 1 | ../conftest_.py -------------------------------------------------------------------------------- /tests/assets/ignore_on_delete/settings.py: -------------------------------------------------------------------------------- 1 | ../settings.py -------------------------------------------------------------------------------- /tests/assets/m2m_rescursive/conftest_.py: -------------------------------------------------------------------------------- 1 | ../conftest_.py -------------------------------------------------------------------------------- /tests/assets/migrate_no_input/settings.py: -------------------------------------------------------------------------------- 1 | ../settings.py -------------------------------------------------------------------------------- /tests/assets/remove_constraint/conftest_.py: -------------------------------------------------------------------------------- 1 | ../conftest_.py -------------------------------------------------------------------------------- /tests/assets/sqlite_migrate/conftest_.py: -------------------------------------------------------------------------------- 1 | ../conftest_.py -------------------------------------------------------------------------------- /tests/assets/sqlite_old_style/conftest_.py: -------------------------------------------------------------------------------- 1 | ../conftest_.py -------------------------------------------------------------------------------- /tests/assets/sqlite_old_style/settings.py: -------------------------------------------------------------------------------- 1 | ../settings.py -------------------------------------------------------------------------------- /tests/assets/table_creations/conftest_.py: -------------------------------------------------------------------------------- 1 | ../conftest_.py -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | custom: ["https://sponsor.long2ice.io"] 2 | -------------------------------------------------------------------------------- /aerich/__main__.py: -------------------------------------------------------------------------------- 1 | from .cli import main 2 | 3 | main() 4 | -------------------------------------------------------------------------------- /tests/assets/command_programmatically/settings.py: -------------------------------------------------------------------------------- 1 | ../settings.py -------------------------------------------------------------------------------- /tests/assets/custom_index_offline/conftest_.py: -------------------------------------------------------------------------------- 1 | ../conftest_.py -------------------------------------------------------------------------------- /tests/assets/custom_index_offline/settings.py: -------------------------------------------------------------------------------- 1 | ../settings.py -------------------------------------------------------------------------------- /tests/assets/per_app_migrations/conftest_.py: 
-------------------------------------------------------------------------------- 1 | ../conftest_.py -------------------------------------------------------------------------------- /tests/assets/delete_model_with_m2m_field/conftest_.py: -------------------------------------------------------------------------------- 1 | ../conftest_.py -------------------------------------------------------------------------------- /tests/assets/delete_model_with_m2m_field/settings.py: -------------------------------------------------------------------------------- 1 | ../settings.py -------------------------------------------------------------------------------- /tests/assets/command_programmatically/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Foo(Model): 5 | a = fields.IntField() 6 | -------------------------------------------------------------------------------- /tests/assets/migrate_no_input/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Foo(Model): 5 | name = fields.CharField(10) 6 | -------------------------------------------------------------------------------- /tests/assets/missing_aerich_models/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Foo(Model): 5 | a = fields.IntField() 6 | -------------------------------------------------------------------------------- /tests/assets/sqlite_old_style/example_db.sqlite3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tortoise/aerich/HEAD/tests/assets/sqlite_old_style/example_db.sqlite3 -------------------------------------------------------------------------------- /tests/assets/table_creations/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Foo(Model): 5 | name = fields.CharField(20) 6 | -------------------------------------------------------------------------------- /aerich/enums.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class Color(str, Enum): 5 | green = "green" 6 | red = "red" 7 | yellow = "yellow" 8 | -------------------------------------------------------------------------------- /tests/assets/class_var_config/app/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Foo(Model): 5 | name = fields.CharField(10) 6 | -------------------------------------------------------------------------------- /tests/assets/custom_index_offline/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Foo(Model): 5 | name = fields.CharField(20) 6 | -------------------------------------------------------------------------------- /tests/assets/missing_aerich_models/other_models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Sth(Model): 5 | a = fields.IntField() 6 | -------------------------------------------------------------------------------- /tests/assets/per_app_migrations/auth/models.py: 
-------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Users(Model): 5 | name = fields.CharField(20) 6 | -------------------------------------------------------------------------------- /tests/assets/delete_model_with_m2m_field/models_3.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class User(Model): 5 | name = fields.CharField(max_length=55) 6 | -------------------------------------------------------------------------------- /tests/assets/per_app_migrations/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.aerich] 2 | tortoise_orm = "settings.TORTOISE_ORM" 3 | location = "./{app}/migrations" 4 | src_folder = "./." 5 | 6 | -------------------------------------------------------------------------------- /tests/assets/sqlite_migrate/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Foo(Model): 5 | name = fields.CharField(max_length=60, db_index=False) 6 | -------------------------------------------------------------------------------- /tests/assets/sqlite_old_style/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Foo(Model): 5 | name = fields.CharField(max_length=60, db_index=False) 6 | -------------------------------------------------------------------------------- /tests/assets/command_programmatically/models_2.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Foo(Model): 5 | a = fields.IntField() 6 | b = fields.IntField() 7 | -------------------------------------------------------------------------------- /tests/assets/settings.py: -------------------------------------------------------------------------------- 1 | TORTOISE_ORM = { 2 | "connections": {"default": "sqlite://db.sqlite3"}, 3 | "apps": {"models": {"models": ["models", "aerich.models"]}}, 4 | } 5 | -------------------------------------------------------------------------------- /tests/indexes.py: -------------------------------------------------------------------------------- 1 | from tortoise.indexes import Index 2 | 3 | 4 | class CustomIndex(Index): 5 | def __init__(self, *args, **kw) -> None: 6 | super().__init__(*args, **kw) 7 | self._foo = "" 8 | -------------------------------------------------------------------------------- /tests/assets/m2m_rescursive/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Node(Model): 5 | children = fields.ManyToManyField( 6 | "models.Node", 7 | related_name="parents", 8 | ) 9 | -------------------------------------------------------------------------------- /tests/assets/custom_index_offline/models_3.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | from tortoise.indexes import Index 3 | 4 | 5 | class CustomIndex(Index): ... 
6 | 7 | 8 | class Foo(Model): 9 | name = fields.CharField(20) 10 | -------------------------------------------------------------------------------- /tests/assets/per_app_migrations/polls/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import fields, models 2 | 3 | 4 | class Question(models.Model): 5 | question_text = fields.CharField(max_length=200) 6 | pub_date = fields.DatetimeField(description="date published") 7 | -------------------------------------------------------------------------------- /tests/assets/per_app_migrations/settings.py: -------------------------------------------------------------------------------- 1 | TORTOISE_ORM = { 2 | "connections": {"default": "sqlite://db.sqlite3"}, 3 | "apps": { 4 | "auth": {"models": ["auth.models", "aerich.models"]}, 5 | "polls": {"models": ["polls.models"]}, 6 | }, 7 | } 8 | -------------------------------------------------------------------------------- /tests/assets/postgres_vector/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.aerich] 2 | tortoise_orm = "settings.TORTOISE_ORM" 3 | location = "./migrations" 4 | src_folder = "./." 5 | 6 | [tool.aerich.inspectdb] 7 | tsvector = "tortoise.contrib.postgres.fields.TSVectorField" 8 | vector = "tortoise_vector.field.VectorField" 9 | -------------------------------------------------------------------------------- /tests/assets/postgres_vector/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | from tortoise.contrib.postgres.fields import TSVectorField 3 | from tortoise_vector.field import VectorField 4 | 5 | 6 | class Foo(Model): 7 | a = fields.IntField() 8 | b = TSVectorField() 9 | c = VectorField(1536) 10 | -------------------------------------------------------------------------------- /tests/assets/custom_index_offline/models_2.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | from tortoise.indexes import Index 3 | 4 | 5 | class CustomIndex(Index): ... 
6 | 7 | 8 | class Foo(Model): 9 | name = fields.CharField(20) 10 | 11 | class Meta: 12 | indexes = [CustomIndex(fields=["name"])] 13 | -------------------------------------------------------------------------------- /tests/assets/m2m_rescursive/models_2.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Node(Model): 5 | children = fields.ManyToManyField( 6 | "models.Node", 7 | related_name="parents", 8 | ) 9 | 10 | 11 | class Dummy(Model): 12 | name = fields.CharField(max_length=100, null=True) 13 | -------------------------------------------------------------------------------- /tests/assets/drop_field_unique/models_2.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model 2 | from tortoise.fields import CharField 3 | 4 | 5 | class UserTicketPackage(Model): 6 | package_order_id = CharField(max_length=100) 7 | qr_code = CharField(max_length=100, unique=False) 8 | 9 | class Meta: 10 | table = "user_ticket_package" 11 | -------------------------------------------------------------------------------- /tests/assets/drop_field_unique/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model 2 | from tortoise.fields import CharField 3 | 4 | 5 | class UserTicketPackage(Model): 6 | package_order_id = CharField(max_length=100, unique=True) 7 | qr_code = CharField(max_length=100, unique=True) 8 | 9 | class Meta: 10 | table = "user_ticket_package" 11 | -------------------------------------------------------------------------------- /tests/assets/drop_field_unique/models_3.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model 2 | from tortoise.fields import CharField 3 | 4 | 5 | class UserTicketPackage(Model): 6 | package_order_id = CharField(max_length=100, unique=True) 7 | qr_code = CharField(max_length=100, unique=True, db_index=True) 8 | 9 | class Meta: 10 | table = "user_ticket_package" 11 | -------------------------------------------------------------------------------- /aerich/exceptions.py: -------------------------------------------------------------------------------- 1 | class AerichError(Exception): 2 | pass 3 | 4 | 5 | class NotSupportError(AerichError): 6 | """ 7 | Raised when a feature is not supported 8 | """ 9 | 10 | 11 | class DowngradeError(AerichError): 12 | """ 13 | Raised when a downgrade fails 14 | """ 15 | 16 | 17 | class NotInitedError(AerichError): 18 | """ 19 | Raised when Tortoise has not been initialized 20 | """ 21 | -------------------------------------------------------------------------------- /tests/assets/sqlite_old_style/_migrations/models/1_20250405163135_update.py: -------------------------------------------------------------------------------- 1 | from tortoise import BaseDBAsyncClient 2 | 3 | 4 | async def upgrade(db: BaseDBAsyncClient) -> str: 5 | return """ 6 | CREATE INDEX "idx_foo_name_2bbf45" ON "foo" ("name");""" 7 | 8 | 9 | async def downgrade(db: BaseDBAsyncClient) -> str: 10 | return """ 11 | DROP INDEX IF EXISTS "idx_foo_name_2bbf45";""" 12 | -------------------------------------------------------------------------------- /tests/assets/delete_model_with_m2m_field/models_2.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class User(Model): 5 | name = fields.CharField(max_length=55) 6 | groups: 
fields.ReverseRelation["Group"] 7 | 8 | 9 | class Group(Model): 10 | name = fields.TextField() 11 | users: fields.ManyToManyRelation[User] = fields.ManyToManyField( 12 | "models.User", related_name="groups" 13 | ) 14 | -------------------------------------------------------------------------------- /tests/assets/drop_field_unique/models_5.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model 2 | from tortoise.fields import CharField 3 | 4 | 5 | class UserTicketPackage(Model): 6 | package_order_id = CharField(max_length=100, unique=True) 7 | qr_code = CharField(max_length=100, unique=True, db_index=True) 8 | name = CharField(max_length=100, default="") 9 | 10 | class Meta: 11 | table = "user_ticket_package" 12 | -------------------------------------------------------------------------------- /tests/assets/class_var_config/_tests.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from app.models import Foo 4 | 5 | 6 | @pytest.mark.anyio 7 | async def test_init_db(): 8 | await Foo.create(name="foo") 9 | obj = await Foo.create(name="foo", age=1) 10 | assert not hasattr(obj, "age") 11 | 12 | 13 | @pytest.mark.anyio 14 | async def test_migrate_upgrade(): 15 | obj = await Foo.create(name="foo", age=1) 16 | assert obj.age == 1 17 | -------------------------------------------------------------------------------- /tests/assets/drop_field_unique/models_4.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model 2 | from tortoise.fields import CharField 3 | 4 | 5 | class UserTicketPackage(Model): 6 | package_order_id = CharField(max_length=100, unique=True) 7 | qr_code = CharField(max_length=100, unique=True, db_index=True) 8 | name = CharField(max_length=100, unique=True, default="") 9 | 10 | class Meta: 11 | table = "user_ticket_package" 12 | -------------------------------------------------------------------------------- /tests/assets/m2m_comment/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class DataLibGroup(Model): 5 | id = fields.IntField(pk=True) 6 | name = fields.CharField(unique=True, max_length=100) 7 | parent = fields.ForeignKeyField( 8 | "models.DataLibGroup", related_name="by_children_list", null=True 9 | ) 10 | level = fields.IntField(default=0) 11 | disabled = fields.BooleanField(default=False) 12 | -------------------------------------------------------------------------------- /tests/assets/remove_constraint/models_2.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Foo(Model): 5 | a = fields.IntField() 6 | b = fields.IntField() 7 | c = fields.IntField(unique=True) 8 | 9 | 10 | class Sth(Model): 11 | a = fields.IntField() 12 | b = fields.IntField() 13 | c = fields.IntField() 14 | d = fields.IntField() 15 | 16 | class Meta: 17 | unique_together = [("a", "b")] 18 | -------------------------------------------------------------------------------- /aerich/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | from aerich.coder import decoder, encoder 4 | 5 | MAX_VERSION_LENGTH = 255 6 | MAX_APP_LENGTH = 100 7 | 8 | 9 | class Aerich(Model): 10 | version = fields.CharField(max_length=MAX_VERSION_LENGTH) 11 | app = 
fields.CharField(max_length=MAX_APP_LENGTH) 12 | content: dict = fields.JSONField(encoder=encoder, decoder=decoder) 13 | 14 | class Meta: 15 | ordering = ["-id"] 16 | -------------------------------------------------------------------------------- /tests/assets/remove_constraint/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Foo(Model): 5 | a = fields.IntField() 6 | b = fields.IntField() 7 | c = fields.IntField(unique=True) 8 | 9 | class Meta: 10 | unique_together = ("a", "b") 11 | 12 | 13 | class Sth(Model): 14 | a = fields.IntField() 15 | b = fields.IntField() 16 | c = fields.IntField() 17 | d = fields.IntField() 18 | 19 | class Meta: 20 | unique_together = [("a", "b"), ("c", "d")] 21 | -------------------------------------------------------------------------------- /tests/assets/custom_index_offline/_tests.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from models import Foo 3 | 4 | 5 | @pytest.mark.anyio 6 | async def test_1(): 7 | obj1 = await Foo.create(name="foo") 8 | assert obj1 in (await Foo.all()) 9 | 10 | 11 | @pytest.mark.anyio 12 | async def test_2(): 13 | obj2 = await Foo.create(name="foo2") 14 | assert obj2 in (await Foo.all()) 15 | 16 | 17 | @pytest.mark.anyio 18 | async def test_3(): 19 | obj3 = await Foo.create(name="foo3") 20 | assert obj3 in (await Foo.all()) 21 | -------------------------------------------------------------------------------- /tests/assets/per_app_migrations/models_2.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from tortoise import Model, fields 4 | from tortoise.fields import OnDelete 5 | 6 | 7 | class Users(Model): 8 | name = fields.CharField(20) 9 | roles: fields.ReverseRelation[Users] 10 | 11 | 12 | class Role(Model): 13 | name = fields.CharField(20) 14 | user: fields.ForeignKeyNullableRelation[Users] = fields.ForeignKeyField( 15 | "auth.Users", on_delete=OnDelete.CASCADE, related_name="roles", null=True 16 | ) 17 | -------------------------------------------------------------------------------- /tests/assets/remove_constraint/models_3.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Foo(Model): 5 | a = fields.IntField() 6 | b = fields.IntField() 7 | c = fields.IntField(unique=True) 8 | 9 | 10 | class Sth(Model): 11 | a = fields.IntField() 12 | b = fields.IntField() 13 | c = fields.IntField() 14 | d = fields.IntField() 15 | e = fields.IntField(null=True) 16 | f = fields.IntField(null=True) 17 | 18 | class Meta: 19 | unique_together = [("a", "b"), ("e", "f")] 20 | -------------------------------------------------------------------------------- /.github/workflows/pypi.yml: -------------------------------------------------------------------------------- 1 | name: pypi 2 | on: 3 | release: 4 | types: 5 | - created 6 | jobs: 7 | publish: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v5 11 | - uses: actions/setup-python@v6 12 | with: 13 | python-version: '3.x' 14 | - uses: astral-sh/setup-uv@v6 15 | - name: Build dists 16 | run: make build 17 | - name: Pypi Publish 18 | uses: pypa/gh-action-pypi-publish@release/v1 19 | with: 20 | user: __token__ 21 | password: ${{ secrets.pypi_password }} 22 | -------------------------------------------------------------------------------- 
/tests/assets/conftest_.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | 3 | import pytest 4 | 5 | with contextlib.suppress(KeyError): # Use suppress to fix ruff check issue I001 6 | from aerich import TortoiseContext 7 | 8 | try: 9 | from settings import TORTOISE_ORM # type:ignore[import-not-found] 10 | except ImportError: 11 | TORTOISE_ORM = None 12 | 13 | 14 | @pytest.fixture(scope="session") 15 | def anyio_backend() -> str: 16 | return "asyncio" 17 | 18 | 19 | @pytest.fixture(autouse=True) 20 | async def init_connections(): 21 | async with TortoiseContext(TORTOISE_ORM): 22 | yield 23 | -------------------------------------------------------------------------------- /tests/assets/m2m_comment/models_2.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class DataLibGroup(Model): 5 | id = fields.IntField(pk=True) 6 | name = fields.CharField(unique=True, max_length=100) 7 | parent = fields.ForeignKeyField( 8 | "models.DataLibGroup", related_name="by_children_list", null=True 9 | ) 10 | level = fields.IntField(default=0) 11 | disabled = fields.BooleanField(default=False) 12 | 13 | 14 | class DataLibItem(Model): 15 | id = fields.IntField(pk=True) 16 | by_group_list = fields.ManyToManyField("models.DataLibGroup", description="I'm testing") 17 | -------------------------------------------------------------------------------- /tests/assets/m2m_comment/settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datetime import date 3 | 4 | from tortoise.contrib.test import MEMORY_SQLITE 5 | 6 | DB_URL = MEMORY_SQLITE 7 | if _u := os.getenv("TEST_DB"): 8 | _db_name = f"aerich_test_m2m_comment_{date.today():%Y%m%d}" 9 | _u = _u.replace("\\{\\}", _db_name) # For Linux 10 | DB_URL = _u.replace("/{/}", _db_name) # For Windows 11 | 12 | TORTOISE_ORM = { 13 | "connections": { 14 | "default": DB_URL.replace(MEMORY_SQLITE, "sqlite://db.sqlite3"), 15 | }, 16 | "apps": { 17 | "models": {"models": ["models", "aerich.models"], "default_connection": "default"}, 18 | }, 19 | } 20 | -------------------------------------------------------------------------------- /tests/assets/postgres_vector/settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datetime import date 3 | 4 | from tortoise.contrib.test import MEMORY_SQLITE 5 | 6 | DB_URL = MEMORY_SQLITE 7 | if _u := os.getenv("TEST_DB"): 8 | _db_name = f"aerich_postgres_vector_{date.today():%Y%m%d}" 9 | _u = _u.replace("\\{\\}", _db_name) # For Linux 10 | DB_URL = _u.replace("/{/}", _db_name) # For Windows 11 | 12 | TORTOISE_ORM = { 13 | "connections": { 14 | "default": DB_URL.replace(MEMORY_SQLITE, "sqlite://db.sqlite3"), 15 | }, 16 | "apps": { 17 | "models": {"models": ["models", "aerich.models"], "default_connection": "default"}, 18 | }, 19 | } 20 | -------------------------------------------------------------------------------- /tests/assets/sqlite_old_style/_migrations/models/0_20250405163033_init.py: -------------------------------------------------------------------------------- 1 | from tortoise import BaseDBAsyncClient 2 | 3 | 4 | async def upgrade(db: BaseDBAsyncClient) -> str: 5 | return """ 6 | CREATE TABLE IF NOT EXISTS "foo" ( 7 | "id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, 8 | "name" VARCHAR(60) NOT NULL 9 | ); 10 | CREATE TABLE IF NOT EXISTS "aerich" ( 11 | "id" INTEGER PRIMARY KEY 
AUTOINCREMENT NOT NULL, 12 | "version" VARCHAR(255) NOT NULL, 13 | "app" VARCHAR(100) NOT NULL, 14 | "content" JSON NOT NULL 15 | );""" 16 | 17 | 18 | async def downgrade(db: BaseDBAsyncClient) -> str: 19 | return """ 20 | """ 21 | -------------------------------------------------------------------------------- /tests/assets/table_creations/settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datetime import date 3 | 4 | from tortoise.contrib.test import MEMORY_SQLITE 5 | 6 | DB_URL = MEMORY_SQLITE 7 | if _u := os.getenv("TEST_DB"): 8 | _db_name = f"aerich_test_table_creations_{date.today():%Y%m%d}" 9 | _u = _u.replace("\\{\\}", _db_name) # For Linux 10 | DB_URL = _u.replace("/{/}", _db_name) # For Windows 11 | 12 | TORTOISE_ORM = { 13 | "connections": { 14 | "default": DB_URL.replace(MEMORY_SQLITE, "sqlite://db.sqlite3"), 15 | }, 16 | "apps": { 17 | "models": {"models": ["models", "aerich.models"], "default_connection": "default"}, 18 | }, 19 | } 20 | -------------------------------------------------------------------------------- /tests/assets/remove_constraint/models_4.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Foo(Model): 5 | a = fields.IntField() 6 | b = fields.IntField() 7 | c = fields.IntField(unique=True) 8 | 9 | 10 | class Sth(Model): 11 | a = fields.IntField() 12 | b = fields.IntField() 13 | c = fields.IntField() 14 | d = fields.IntField() 15 | e = fields.IntField(null=True) 16 | f = fields.IntField(null=True) 17 | 18 | class Meta: 19 | unique_together = [("a", "b"), ("e", "f")] 20 | 21 | 22 | class New(Model): 23 | a = fields.IntField() 24 | b = fields.IntField() 25 | 26 | class Meta: 27 | unique_together = [("a", "b")] 28 | -------------------------------------------------------------------------------- /tests/assets/table_creations/models_2.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Foo(Model): 5 | name = fields.CharField(20) 6 | g = fields.OneToOneField("models.G", null=True) 7 | h = fields.ForeignKeyField("models.H", null=True) 8 | 9 | 10 | class B(Model): 11 | name = fields.CharField(20) 12 | 13 | 14 | class C(Model): 15 | name = fields.CharField(20) 16 | 17 | 18 | class G(Model): 19 | name = fields.CharField(20) 20 | 21 | 22 | class H(Model): 23 | name = fields.CharField(20) 24 | 25 | 26 | class A(Model): 27 | name = fields.CharField(20) 28 | b = fields.ForeignKeyField("models.B") 29 | c = fields.OneToOneField("models.C") 30 | -------------------------------------------------------------------------------- /tests/assets/m2m_comment/_tests.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from models import DataLibGroup 3 | 4 | 5 | @pytest.mark.anyio 6 | async def test_1(): 7 | obj1 = await DataLibGroup.create(name="parent") 8 | obj2 = await DataLibGroup.create(name="child", parent=obj1) 9 | assert obj2 in (await obj1.by_children_list.all()) 10 | 11 | 12 | @pytest.mark.anyio 13 | async def test_2(): 14 | from models import DataLibItem 15 | 16 | group = await DataLibGroup.create(name="group") 17 | item = await DataLibItem.create(id=1) 18 | await item.by_group_list.add(group) 19 | saved_item = await DataLibItem.get(pk=item.pk) 20 | assert group in (await saved_item.by_group_list.all()) 21 | 
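The asset projects above (m2m_comment, postgres_vector, table_creations, ...) each pair a settings.py/models.py with an _tests.py that is run after the migration steps. A minimal sketch of driving one such project programmatically, assuming aerich's Command also exposes init_db/migrate/upgrade as named — only the async-context-manager form and history()/heads() are exercised in tests/test_command.py further below:

    # run_migrations.py -- hypothetical helper, not part of the repository
    import asyncio

    from aerich import Command
    from settings import TORTOISE_ORM


    async def main() -> None:
        # Command is used as an async context manager in tests/test_command.py;
        # entering it is assumed to perform the init that the other calls need.
        async with Command(TORTOISE_ORM, app="models") as command:
            await command.init_db(safe=True)      # assumed: create tables and the initial migration
            await command.migrate(name="update")  # assumed: diff models against the last migration
            await command.upgrade()               # assumed: apply any pending migrations
            print(await command.heads())


    asyncio.run(main())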
-------------------------------------------------------------------------------- /tests/assets/remove_constraint/models_5.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Foo(Model): 5 | a = fields.IntField() 6 | b = fields.IntField() 7 | c = fields.IntField(unique=True) 8 | 9 | 10 | class Sth(Model): 11 | a = fields.IntField() 12 | b = fields.IntField() 13 | c = fields.IntField() 14 | d = fields.IntField() 15 | e = fields.IntField(null=True) 16 | f = fields.IntField(null=True) 17 | 18 | class Meta: 19 | unique_together = [("a", "b"), ("e", "f")] 20 | 21 | 22 | class New(Model): 23 | a2 = fields.IntField() 24 | b2 = fields.IntField() 25 | 26 | class Meta: 27 | unique_together = [("a2", "b2")] 28 | -------------------------------------------------------------------------------- /tests/assets/db.py: -------------------------------------------------------------------------------- 1 | import asyncclick as click 2 | from settings import TORTOISE_ORM 3 | 4 | try: 5 | from _utils import drop_db, init_db 6 | except ImportError: 7 | from tests._utils import drop_db, init_db 8 | 9 | 10 | @click.group() 11 | def cli(): ... 12 | 13 | 14 | @cli.command() 15 | async def create(): 16 | await init_db(TORTOISE_ORM, False) 17 | click.echo(f"Success to create databases for {TORTOISE_ORM['connections']}") 18 | 19 | 20 | @cli.command() 21 | async def drop(): 22 | await drop_db(TORTOISE_ORM) 23 | click.echo(f"Dropped databases for {TORTOISE_ORM['connections']}") 24 | 25 | 26 | def main(): 27 | cli() 28 | 29 | 30 | if __name__ == "__main__": 31 | main() 32 | -------------------------------------------------------------------------------- /tests/assets/drop_field_unique/settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datetime import date 3 | 4 | from tortoise.contrib.test import MEMORY_SQLITE 5 | 6 | DB_URL = MEMORY_SQLITE 7 | if _u := os.getenv("TEST_DB"): 8 | _db_name = f"aerich_drop_field_unique_{date.today():%Y%m%d}" 9 | _u = _u.replace("\\{\\}", _db_name) # For Linux 10 | DB_URL = _u.replace("/{/}", _db_name) # For Windows 11 | DB_URL_SECOND = (DB_URL + "_second") if DB_URL != MEMORY_SQLITE else MEMORY_SQLITE 12 | 13 | TORTOISE_ORM = { 14 | "connections": { 15 | "default": DB_URL.replace(MEMORY_SQLITE, "sqlite://db.sqlite3"), 16 | }, 17 | "apps": { 18 | "models": {"models": ["models", "aerich.models"], "default_connection": "default"}, 19 | }, 20 | } 21 | -------------------------------------------------------------------------------- /tests/assets/remove_constraint/settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datetime import date 3 | 4 | from tortoise.contrib.test import MEMORY_SQLITE 5 | 6 | DB_URL = MEMORY_SQLITE 7 | if _u := os.getenv("TEST_DB"): 8 | _db_name = f"aerich_remove_constraint_{date.today():%Y%m%d}" 9 | _u = _u.replace("\\{\\}", _db_name) # For Linux 10 | DB_URL = _u.replace("/{/}", _db_name) # For Windows 11 | DB_URL_SECOND = (DB_URL + "_second") if DB_URL != MEMORY_SQLITE else MEMORY_SQLITE 12 | 13 | TORTOISE_ORM = { 14 | "connections": { 15 | "default": DB_URL.replace(MEMORY_SQLITE, "sqlite://db.sqlite3"), 16 | }, 17 | "apps": { 18 | "models": {"models": ["models", "aerich.models"], "default_connection": "default"}, 19 | }, 20 | } 21 | -------------------------------------------------------------------------------- /tests/assets/m2m_rescursive/_tests.py: 
-------------------------------------------------------------------------------- 1 | import pytest 2 | from models import Node 3 | 4 | 5 | @pytest.mark.anyio 6 | async def test_1(): 7 | obj1 = await Node.create(id=1) 8 | obj2 = await Node.create(id=2) 9 | await obj1.children.add(obj2) 10 | saved_obj = await Node.get(pk=obj1.pk) 11 | assert obj2 in (await saved_obj.children.all()) 12 | 13 | 14 | @pytest.mark.anyio 15 | async def test_2(): 16 | from models import Dummy 17 | 18 | await Dummy.create(name="foo") 19 | 20 | obj3 = await Node.create(id=3) 21 | obj4 = await Node.create(id=4) 22 | obj5 = await Node.create(id=5) 23 | await obj3.children.add(obj4, obj5) 24 | saved_obj = await Node.get(pk=obj3.pk) 25 | assert (await saved_obj.children.all()) == [obj4, obj5] 26 | -------------------------------------------------------------------------------- /tests/assets/ignore_on_delete/_tests.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from models import Group, Profile, User 3 | 4 | 5 | async def _test_cascade() -> None: 6 | group = await Group.create(name="g") 7 | user1 = await User.create(name="u1", group=group) 8 | user2 = await User.create(name="u2", group=group) 9 | profile1 = await Profile.create(age=1, user=user1) 10 | profile2 = await Profile.create(age=2, user=user2) 11 | await group.delete() 12 | assert not await User.filter(id__in=[user1.id, user2.id]).exists() 13 | assert not await Profile.filter(id__in=[profile1.id, profile2.id]).exists() 14 | 15 | 16 | @pytest.mark.anyio 17 | async def test_1(): 18 | await _test_cascade() 19 | 20 | 21 | @pytest.mark.anyio 22 | async def test_2(): 23 | await _test_cascade() 24 | -------------------------------------------------------------------------------- /tests/assets/table_creations/_tests.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from models import Foo 3 | 4 | 5 | @pytest.mark.anyio 6 | async def test_1(): 7 | obj1 = await Foo.create(name="foo") 8 | assert obj1 in (await Foo.all()) 9 | 10 | 11 | @pytest.mark.anyio 12 | async def test_2(): 13 | from models import A, B, C, G, H 14 | 15 | b = await B.create(name="b") 16 | c = await C.create(name="c") 17 | g = await G.create(name="g") 18 | h = await H.create(name="h") 19 | a = await A.create(name="a", b=b, c=c) 20 | f = await Foo.create(name="f", g=g) 21 | fh = await Foo.create(name="fh", h=h) 22 | 23 | assert a in (await A.filter(b=b)) 24 | assert a in (await A.filter(c=c)) 25 | assert f in (await Foo.filter(g=g)) 26 | assert fh in (await Foo.filter(h=h)) 27 | -------------------------------------------------------------------------------- /tests/assets/delete_model_with_m2m_field/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | 4 | class Event(Model): 5 | id = fields.IntField(primary_key=True) 6 | name = fields.CharField(max_length=255) 7 | participants = fields.ManyToManyField( 8 | "models.Team", related_name="events", through="event_team" 9 | ) 10 | 11 | 12 | class Team(Model): 13 | id = fields.IntField(primary_key=True) 14 | name = fields.CharField(max_length=255) 15 | 16 | 17 | class User(Model): 18 | name = fields.CharField(max_length=55) 19 | groups: fields.ReverseRelation["Group"] 20 | 21 | 22 | class Group(Model): 23 | name = fields.TextField() 24 | users: fields.ManyToManyRelation[User] = fields.ManyToManyField( 25 | "models.User", related_name="groups" 26 | ) 27 | 
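The numbered model files in these asset directories (models.py, models_2.py, models_3.py, ...) are successive schema states: each state replaces models.py, a new migration is generated and applied, and the matching test in _tests.py is run. A rough sketch of that loop — the "aerich init -t settings.TORTOISE_ORM" call, the models_2.py swap, and the per-step pytest runs appear in tests/test_command.py further below, while the init-db/migrate/upgrade subcommands are assumed from aerich's CLI:

    # step_through.py -- hypothetical illustration of how an asset project is exercised
    import shutil
    import subprocess


    def run(cmd: str) -> None:
        subprocess.run(cmd, shell=True, check=True)  # nosec


    run("aerich init -t settings.TORTOISE_ORM")
    run("aerich init-db")
    run("pytest -s _tests.py::test_1")
    for step, test in (("models_2.py", "test_2"), ("models_3.py", "test_3")):
        shutil.copy(step, "models.py")  # adopt the next schema state
        run("aerich migrate")           # assumed: generate a migration from the model diff
        run("aerich upgrade")           # assumed: apply it to the database
        run(f"pytest -s _tests.py::{test}")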
-------------------------------------------------------------------------------- /tests/assets/ignore_on_delete/models.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from tortoise import Model, fields 4 | from tortoise.fields import OnDelete 5 | 6 | 7 | class Group(Model): 8 | name = fields.CharField(20) 9 | 10 | users: fields.ReverseRelation[User] 11 | 12 | 13 | class User(Model): 14 | name = fields.CharField(20) 15 | group: fields.ForeignKeyNullableRelation[Group] = fields.ForeignKeyField( 16 | "models.Group", on_delete=OnDelete.CASCADE, related_name="users", null=True 17 | ) 18 | 19 | profile: fields.ReverseRelation[Profile] 20 | 21 | 22 | class Profile(Model): 23 | age = fields.IntField() 24 | user: fields.OneToOneRelation[User] = fields.OneToOneField( 25 | "models.User", on_delete=OnDelete.CASCADE, related_name="profile" 26 | ) 27 | -------------------------------------------------------------------------------- /tests/assets/ignore_on_delete/models_2.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from tortoise import Model, fields 4 | from tortoise.fields import OnDelete 5 | 6 | 7 | class Group(Model): 8 | name = fields.CharField(20) 9 | 10 | users: fields.ReverseRelation[User] 11 | 12 | 13 | class User(Model): 14 | name = fields.CharField(20) 15 | group: fields.ForeignKeyNullableRelation[Group] = fields.ForeignKeyField( 16 | "models.Group", on_delete=OnDelete.SET_NULL, related_name="users", null=True 17 | ) 18 | 19 | profile: fields.ReverseRelation[Profile] 20 | 21 | 22 | class Profile(Model): 23 | age = fields.IntField() 24 | user: fields.OneToOneRelation[User] = fields.OneToOneField( 25 | "models.User", on_delete=OnDelete.NO_ACTION, related_name="profile" 26 | ) 27 | -------------------------------------------------------------------------------- /tests/assets/fake/settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datetime import date 3 | 4 | from tortoise.contrib.test import MEMORY_SQLITE 5 | 6 | DB_URL = MEMORY_SQLITE 7 | if _u := os.getenv("TEST_DB"): 8 | _db_name = f"aerich_fake_{date.today():%Y%m%d}" 9 | _u = _u.replace("\\{\\}", _db_name) # For Linux 10 | DB_URL = _u.replace("/{/}", _db_name) # For Windows 11 | DB_URL_SECOND = (DB_URL + "_second") if DB_URL != MEMORY_SQLITE else MEMORY_SQLITE 12 | 13 | TORTOISE_ORM = { 14 | "connections": { 15 | "default": DB_URL.replace(MEMORY_SQLITE, "sqlite://db.sqlite3"), 16 | "second": DB_URL_SECOND.replace(MEMORY_SQLITE, "sqlite://db_second.sqlite3"), 17 | }, 18 | "apps": { 19 | "models": {"models": ["models", "aerich.models"], "default_connection": "default"}, 20 | "models_second": {"models": ["models_second"], "default_connection": "second"}, 21 | }, 22 | } 23 | -------------------------------------------------------------------------------- /tests/assets/per_app_migrations/models_3.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import TYPE_CHECKING 4 | 5 | from tortoise import Model 6 | from tortoise import fields as models 7 | 8 | if TYPE_CHECKING: 9 | from tortoise.fields import ForeignKeyRelation, ReverseRelation 10 | 11 | models.DateTimeField = models.DatetimeField 12 | models.IntegerField = models.IntField 13 | models.ForeignKey = models.ForeignKeyField 14 | 15 | 16 | class Question(Model): 17 | 
question_text = models.CharField(max_length=200) 18 | pub_date = models.DateTimeField(description="date published") 19 | choice_set: ReverseRelation[Choice] 20 | 21 | 22 | class Choice(Model): 23 | question: ForeignKeyRelation[Question] = models.ForeignKey( 24 | "polls.Question", on_delete=models.CASCADE, related_name="choice_set" 25 | ) 26 | choice_text = models.CharField(max_length=200) 27 | votes = models.IntegerField(default=0) 28 | -------------------------------------------------------------------------------- /tests/assets/per_app_migrations/_tests.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from auth.models import Users 3 | from polls.models import Question 4 | from tortoise import timezone 5 | 6 | 7 | @pytest.mark.anyio 8 | async def test_1(): 9 | user = await Users.create(name="iron") 10 | assert user in (await Users.all()) 11 | question = await Question.create(question_text="How are you?", pub_date=timezone.now()) 12 | assert question in (await Question.all()) 13 | 14 | 15 | @pytest.mark.anyio 16 | async def test_2(): 17 | from auth.models import Role 18 | 19 | user = await Users.create(name="been") 20 | role = await Role.create(name="Member", user=user) 21 | assert role in (await user.roles.all()) 22 | 23 | 24 | @pytest.mark.anyio 25 | async def test_3(): 26 | from polls.models import Choice 27 | 28 | question = await Question.create(question_text="Where are you?", pub_date=timezone.now()) 29 | choice = await Choice.create(choice_text="here", question=question) 30 | assert choice in (await question.choice_set.all()) 31 | -------------------------------------------------------------------------------- /tests/assets/missing_aerich_models/settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datetime import date 3 | 4 | from tortoise.contrib.test import MEMORY_SQLITE 5 | 6 | DB_URL = MEMORY_SQLITE 7 | if _u := os.getenv("TEST_DB"): 8 | _db_name = f"aerich_missing_models_{date.today():%Y%m%d}" 9 | _u = _u.replace("\\{\\}", _db_name) # For Linux 10 | DB_URL = _u.replace("/{/}", _db_name) # For Windows 11 | 12 | TORTOISE_ORM = { 13 | "connections": { 14 | "default": DB_URL.replace(MEMORY_SQLITE, "sqlite://db.sqlite3"), 15 | }, 16 | "apps": {"models": {"models": ["models", "aerich.models"]}}, 17 | } 18 | TORTOISE_ORM_NO_AERICH_MODELS = { 19 | **TORTOISE_ORM, 20 | "apps": { 21 | "models": {"models": ["models"]}, 22 | }, 23 | } 24 | TORTOISE_ORM_MULTI_APPS_WITHOUT_AERICH_MODELS = { 25 | **TORTOISE_ORM, 26 | "apps": { 27 | "models": {"models": ["models"]}, 28 | "other_models": {"models": ["other_models"]}, 29 | }, 30 | } 31 | TORTOISE_ORM_MULTI_APPS = { 32 | **TORTOISE_ORM, 33 | "apps": { 34 | "models": {"models": ["models", "aerich.models"]}, 35 | "other_models": {"models": ["other_models"]}, 36 | }, 37 | } 38 | -------------------------------------------------------------------------------- /aerich/ddl/sqlite/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from tortoise import Model 4 | from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator 5 | 6 | from aerich.ddl import BaseDDL 7 | from aerich.exceptions import NotSupportError 8 | 9 | 10 | class SqliteDDL(BaseDDL): 11 | schema_generator_cls = SqliteSchemaGenerator 12 | DIALECT = SqliteSchemaGenerator.DIALECT 13 | _ADD_INDEX_TEMPLATE = 'CREATE {unique}INDEX "{index_name}" ON "{table_name}" 
({column_names})' 14 | _DROP_INDEX_TEMPLATE = 'DROP INDEX IF EXISTS "{index_name}"' 15 | 16 | def modify_column(self, model: type[Model], field_object: dict, is_pk: bool = True): 17 | raise NotSupportError("Modify column is unsupported in SQLite.") 18 | 19 | def alter_column_default(self, model: type[Model], field_describe: dict): 20 | raise NotSupportError("Alter column default is unsupported in SQLite.") 21 | 22 | def alter_column_null(self, model: type[Model], field_describe: dict): 23 | raise NotSupportError("Alter column null is unsupported in SQLite.") 24 | 25 | def set_comment(self, model: type[Model], field_describe: dict): 26 | raise NotSupportError("Alter column comment is unsupported in SQLite.") 27 | -------------------------------------------------------------------------------- /tests/test_command.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | 3 | from aerich import Command 4 | from conftest import tortoise_orm 5 | from tests._utils import prepare_py_files, requires_dialect, run_shell 6 | 7 | 8 | async def test_command(mocker): 9 | mocker.patch("os.listdir", return_value=[]) 10 | async with Command(tortoise_orm) as command: 11 | history = await command.history() 12 | heads = await command.heads() 13 | assert history == [] 14 | assert heads == [] 15 | 16 | 17 | @requires_dialect("sqlite") 18 | def test_await_command(tmp_work_dir): 19 | prepare_py_files("command_programmatically") 20 | run_shell("aerich init -t settings.TORTOISE_ORM", capture_output=False) 21 | output = run_shell("pytest -s _tests.py::test_command_not_inited") 22 | assert "error" not in output.lower() 23 | output = run_shell("pytest -s _tests.py::test_init_command_by_async_with") 24 | assert "error" not in output.lower() 25 | output = run_shell("pytest -s _tests.py::test_init_command_by_await") 26 | assert "error" not in output.lower() 27 | output = run_shell("pytest -s _tests.py::test_init_command_by_init_func") 28 | assert "error" not in output.lower() 29 | shutil.move("models_2.py", "models.py") 30 | output = run_shell("pytest -s _tests.py::test_migrate_upgrade") 31 | assert "error" not in output.lower() 32 | -------------------------------------------------------------------------------- /aerich/coder.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import base64 4 | import json 5 | import pickle # nosec: B301,B403 6 | from typing import Any 7 | 8 | from tortoise.indexes import Index 9 | 10 | 11 | class JsonEncoder(json.JSONEncoder): 12 | def default(self, obj: Any) -> Any: 13 | if isinstance(obj, Index): 14 | if hasattr(obj, "describe"): 15 | # For tortoise>=0.24 16 | return obj.describe() 17 | return { 18 | "type": "index", 19 | "val": base64.b64encode(pickle.dumps(obj)).decode(), # nosec: B301 20 | } 21 | else: 22 | return super().default(obj) 23 | 24 | 25 | def object_hook(obj: dict[str, Any]) -> Any: 26 | if (type_ := obj.get("type")) and type_ == "index" and (val := obj.get("val")): 27 | return pickle.loads(base64.b64decode(val)) # nosec: B301 28 | return obj 29 | 30 | 31 | def load_index(obj: dict[str, Any]) -> Index: 32 | """Convert a dict that was generated by `Index.describe()` to an Index instance""" 33 | try: 34 | index = Index(fields=obj["fields"] or obj["expressions"], name=obj.get("name")) 35 | except KeyError: 36 | return object_hook(obj) 37 | if extra := obj.get("extra"): 38 | index.extra = extra 39 | if idx_type := obj.get("type"): 40 | index.INDEX_TYPE = 
idx_type 41 | return index 42 | 43 | 44 | def encoder(obj: dict[str, Any]) -> str: 45 | return json.dumps(obj, cls=JsonEncoder) 46 | 47 | 48 | def decoder(obj: str | bytes) -> Any: 49 | return json.loads(obj, object_hook=object_hook) 50 | -------------------------------------------------------------------------------- /tests/test_python_m.py: -------------------------------------------------------------------------------- 1 | import re 2 | import shutil 3 | import subprocess # nosec 4 | import sys 5 | from pathlib import Path 6 | 7 | from aerich.version import __version__ 8 | from tests._utils import WINDOWS, requires_env, run_shell 9 | 10 | 11 | def test_python_m_aerich(): 12 | assert __version__ in run_shell("python -m aerich --version") 13 | 14 | 15 | @requires_env("AERICH_TEST_POETRY_ADD") 16 | def test_poetry_add(tmp_work_dir: Path): 17 | poetry = "poetry" 18 | if shutil.which(poetry) is None: 19 | poetry = "uvx " + poetry 20 | run_shell(f'{poetry} init --no-interaction --python=">=3.9"') 21 | py = "{}.{}".format(*sys.version_info) 22 | run_shell(f"{poetry} config --local virtualenvs.in-project true") 23 | run_shell(f"{poetry} env use {py}") 24 | package = Path(__file__).parent.resolve().parent 25 | if WINDOWS and package.anchor != tmp_work_dir.anchor: 26 | # Fix: path is on mount 'D:', start on mount 'C:' 27 | tmp_package = Path(package.name) 28 | tmp_package.mkdir() 29 | shutil.copytree(package / package.name, tmp_package / package.name) 30 | for name in ("pyproject.toml", "README.md"): 31 | shutil.copy(package / name, tmp_package) 32 | package = tmp_package 33 | r = subprocess.run([*poetry.split(), "add", package]) # nosec 34 | assert r.returncode == 0 35 | out = subprocess.run( 36 | [*poetry.split(), "run", "pip", "list"], 37 | text=True, 38 | capture_output=True, 39 | encoding="utf-8", 40 | ).stdout 41 | assert re.search(rf"{package.name}\s*{__version__}", out) 42 | -------------------------------------------------------------------------------- /tests/assets/drop_field_unique/_tests.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from models import UserTicketPackage as Foo 3 | from tortoise.exceptions import OperationalError 4 | 5 | 6 | async def assert_not_unique(): 7 | await Foo.create(package_order_id="1", qr_code="c") 8 | with pytest.raises(OperationalError): 9 | await Foo.create(package_order_id="1", qr_code="c") 10 | with pytest.raises(OperationalError): 11 | await Foo.create(package_order_id="2", qr_code="c") 12 | with pytest.raises(OperationalError): 13 | await Foo.create(package_order_id="1", qr_code="xxx") 14 | 15 | 16 | @pytest.mark.anyio 17 | async def test_1(): 18 | await assert_not_unique() 19 | 20 | 21 | @pytest.mark.anyio 22 | async def test_2(): 23 | await Foo.create(package_order_id="id", qr_code="code") 24 | await Foo.create(package_order_id="id", qr_code="code") 25 | await Foo.all().delete() 26 | 27 | 28 | @pytest.mark.anyio 29 | async def test_3(): 30 | await assert_not_unique() 31 | 32 | 33 | @pytest.mark.anyio 34 | async def test_4(): 35 | await Foo.all().delete() 36 | await Foo.create(package_order_id="id", qr_code="code") 37 | with pytest.raises(OperationalError): 38 | await Foo.create(package_order_id="id2", qr_code="code2") 39 | await Foo.create(package_order_id="id2", qr_code="code2", name="2") 40 | 41 | 42 | @pytest.mark.anyio 43 | async def test_5(): 44 | await Foo.create(package_order_id="id3", qr_code="code3", name="2") 45 | await Foo.create(package_order_id="id4", qr_code="code4", 
name="2") 46 | await Foo.create(package_order_id="id5", qr_code="code5") 47 | await Foo.create(package_order_id="id6", qr_code="code6") 48 | -------------------------------------------------------------------------------- /tests/assets/class_var_config/app/core/config.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any, Literal 4 | 5 | from pydantic import computed_field 6 | from pydantic_core import MultiHostUrl 7 | from pydantic_settings import BaseSettings, SettingsConfigDict 8 | 9 | 10 | class Settings(BaseSettings): 11 | model_config = SettingsConfigDict( 12 | # Use top level .env file (one level above ./backend/) 13 | env_file=".env", 14 | env_ignore_empty=True, 15 | extra="ignore", 16 | ) 17 | ENVIRONMENT: Literal["local", "staging", "production"] = "local" 18 | POSTGRES_SERVER: str = "127.0.0.1" 19 | POSTGRES_PORT: int = 5432 20 | POSTGRES_USER: str = "postgres" 21 | POSTGRES_PASSWORD: str = "postgres" 22 | POSTGRES_DB: str = "aerich_dev" 23 | 24 | @computed_field # type: ignore[prop-decorator] 25 | @property 26 | def DATABASE_URI(self) -> MultiHostUrl: 27 | return MultiHostUrl.build( 28 | scheme="postgres", 29 | username=self.POSTGRES_USER, 30 | password=self.POSTGRES_PASSWORD, 31 | host=self.POSTGRES_SERVER, 32 | port=self.POSTGRES_PORT, 33 | path=self.POSTGRES_DB, 34 | ) 35 | 36 | @computed_field # type: ignore[prop-decorator] 37 | @property 38 | def TORTOISE_ORM(self) -> dict[str, dict[str, Any]]: 39 | db_url = ( 40 | "sqlite://db.sqlite3" 41 | if self.ENVIRONMENT == "local" 42 | else str(self.DATABASE_URI) 43 | ) 44 | return { 45 | "connections": {"default": db_url}, 46 | "apps": {"models": {"models": ["app.models", "aerich.models"]}}, 47 | } 48 | 49 | 50 | settings = Settings() 51 | -------------------------------------------------------------------------------- /tests/assets/remove_constraint/_tests.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from models import Foo, Sth 3 | from tortoise.exceptions import OperationalError 4 | 5 | 6 | @pytest.mark.anyio 7 | async def test_init_db(): 8 | await Foo.create(a=1, b=1, c=1) 9 | with pytest.raises(OperationalError): 10 | await Foo.create(a=1, b=1, c=2) 11 | with pytest.raises(OperationalError): 12 | await Foo.create(a=1, b=2, c=1) 13 | with pytest.raises(OperationalError): 14 | await Sth.create(a=1, b=1, c=1, d=1) 15 | await Sth.create(a=1, b=1, c=1, d=1) 16 | 17 | 18 | @pytest.mark.anyio 19 | async def test_models_2(): 20 | await Foo.create(a=2, b=2, c=2) 21 | await Foo.create(a=2, b=2, c=3) 22 | with pytest.raises(OperationalError): 23 | await Foo.create(a=2, b=2, c=3) 24 | await Sth.create(a=2, b=2, c=2, d=2) 25 | await Sth.create(a=3, b=2, c=2, d=2) 26 | with pytest.raises(OperationalError): 27 | await Sth.create(a=3, b=2, c=2, d=2) 28 | 29 | 30 | @pytest.mark.anyio 31 | async def test_models_3(): 32 | await Sth.create(a=3, b=3, c=3, d=3, e=3, f=3) 33 | with pytest.raises(OperationalError): 34 | await Sth.create(a=3, b=3, c=3, d=3, e=3, f=4) 35 | with pytest.raises(OperationalError): 36 | await Sth.create(a=3, b=4, c=3, d=3, e=3, f=3) 37 | 38 | 39 | @pytest.mark.anyio 40 | async def test_models_4(): 41 | from models import New 42 | 43 | await New.create(a=1, b=1) 44 | with pytest.raises(OperationalError): 45 | await New.create(a=1, b=1) 46 | 47 | 48 | @pytest.mark.anyio 49 | async def test_models_5(): 50 | from models import New 51 | 52 | await New.create(a2=2, 
b2=2) 53 | with pytest.raises(OperationalError): 54 | await New.create(a2=2, b2=2) 55 | -------------------------------------------------------------------------------- /tests/assets/delete_model_with_m2m_field/_tests.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from models import User 3 | from tortoise import connections 4 | from tortoise.exceptions import OperationalError 5 | 6 | 7 | async def _run_sql(statement: str) -> list[dict]: 8 | conn = connections.get("default") 9 | return await conn.execute_query_dict(statement) 10 | 11 | 12 | async def select_even_team_m2m() -> list[dict]: 13 | return await _run_sql("SELECT * FROM event_team") 14 | 15 | 16 | async def select_user_group_m2m() -> list[dict]: 17 | return await _run_sql("SELECT * FROM group_user") 18 | 19 | 20 | @pytest.mark.anyio 21 | async def test_1(): 22 | from models import Event, Group, Team 23 | 24 | e1 = await Event.create(name="e1") 25 | t1 = await Team.create(name="t1") 26 | await e1.participants.add(t1) 27 | u1 = await User.create(name="u1") 28 | g1 = await Group.create(name="g1") 29 | await g1.users.add(u1) 30 | assert (await t1.events.all().count()) == 1 31 | assert (await u1.groups.all().count()) == 1 32 | val = await select_even_team_m2m() 33 | assert val == [{"event_id": e1.id, "team_id": t1.id}] 34 | 35 | 36 | @pytest.mark.anyio 37 | async def test_2(): 38 | from models import Group 39 | 40 | u2 = await User.create(name="u2") 41 | g2 = await Group.create(name="g2") 42 | await g2.users.add(u2) 43 | assert (await u2.groups.all().count()) == 1 44 | val = await select_user_group_m2m() 45 | assert {"group_id": g2.id, "user_id": u2.id} in val 46 | 47 | with pytest.raises(OperationalError, match="no such table"): 48 | await select_even_team_m2m() 49 | 50 | 51 | @pytest.mark.anyio 52 | async def test_3(): 53 | await User.create(name="u3") 54 | assert (await User.all().count()) >= 1 55 | with pytest.raises(OperationalError, match="no such table"): 56 | await select_user_group_m2m() 57 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | src_dir = aerich 2 | checkfiles = $(src_dir) tests/ conftest.py 3 | py_warn = PYTHONDEVMODE=1 4 | pytest_opts = --cov=$(src_dir) --cov-append --tb=native -q 5 | MYSQL_HOST ?= "127.0.0.1" 6 | MYSQL_PORT ?= 3306 7 | MYSQL_PASS ?= "123456" 8 | POSTGRES_HOST ?= "127.0.0.1" 9 | POSTGRES_PORT ?= 5432 10 | POSTGRES_PASS ?= 123456 11 | 12 | up: 13 | @uv lock --upgrade 14 | 15 | deps: 16 | @uv sync --all-extras --all-groups --no-extra asyncmy --no-group=vector $(options) 17 | 18 | _style: 19 | @ruff format $(checkfiles) 20 | @ruff check --fix $(checkfiles) 21 | style: deps _style 22 | 23 | _codeqc: 24 | mypy $(checkfiles) 25 | bandit -c pyproject.toml -r $(checkfiles) 26 | twine check dist/* 27 | codeqc: build _codeqc 28 | 29 | _check: _build 30 | @ruff format --check $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false) 31 | @ruff check $(checkfiles) 32 | $(MAKE) _codeqc 33 | check: deps _check 34 | 35 | _lint: _build _style _codeqc 36 | lint: deps _lint 37 | 38 | test: deps 39 | $(py_warn) pytest $(pytest_opts) 40 | 41 | test_sqlite: 42 | $(py_warn) TEST_DB=sqlite://:memory: pytest $(pytest_opts) 43 | 44 | test_mysql: 45 | $(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" pytest -vv -s $(pytest_opts) 46 | 47 | test_postgres: 48 | $(py_warn) 
TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest -vv -s $(pytest_opts) 49 | 50 | test_postgres_vector: 51 | $(py_warn) AERICH_TEST_VECTOR=1 TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest -vv -s tests/test_inspectdb.py::test_inspect_vector $(pytest_opts) 52 | 53 | test_psycopg: 54 | $(py_warn) TEST_DB="psycopg://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest -vv -s $(pytest_opts) 55 | 56 | _testall: test_sqlite test_postgres test_mysql 57 | testall: deps _testall 58 | 59 | report: 60 | coverage report -m 61 | 62 | _build: 63 | rm -fR dist/ 64 | uv build 65 | build: deps _build 66 | 67 | ci: build _check _testall 68 | -------------------------------------------------------------------------------- /tests/assets/class_var_config/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "app" 3 | version = "0.1.0" 4 | description = "" 5 | requires-python = ">=3.10,<4.0" 6 | dependencies = [ 7 | "fastapi[standard]<1.0.0,>=0.114.2", 8 | "python-multipart<1.0.0,>=0.0.7", 9 | "email-validator<3.0.0.0,>=2.1.0.post1", 10 | "passlib[bcrypt]<2.0.0,>=1.7.4", 11 | "tenacity<9.0.0,>=8.2.3", 12 | "pydantic>2.0", 13 | "emails<1.0,>=0.6", 14 | "jinja2<4.0.0,>=3.1.4", 15 | "alembic<2.0.0,>=1.12.1", 16 | "httpx<1.0.0,>=0.25.1", 17 | "psycopg[binary]<4.0.0,>=3.1.13", 18 | "sqlmodel<1.0.0,>=0.0.21", 19 | # Pin bcrypt until passlib supports the latest 20 | "bcrypt==4.3.0", 21 | "pydantic-settings<3.0.0,>=2.2.1", 22 | "sentry-sdk[fastapi]<2.0.0,>=1.40.6", 23 | "pyjwt<3.0.0,>=2.8.0", 24 | ] 25 | 26 | [tool.uv] 27 | dev-dependencies = [ 28 | "pytest<8.0.0,>=7.4.3", 29 | "mypy<2.0.0,>=1.8.0", 30 | "ruff<1.0.0,>=0.2.2", 31 | "pre-commit<4.0.0,>=3.6.2", 32 | "types-passlib<2.0.0.0,>=1.7.7.20240106", 33 | "coverage<8.0.0,>=7.4.3", 34 | ] 35 | 36 | [build-system] 37 | requires = ["hatchling"] 38 | build-backend = "hatchling.build" 39 | 40 | [tool.mypy] 41 | strict = true 42 | exclude = ["venv", ".venv", "alembic"] 43 | 44 | [tool.ruff] 45 | target-version = "py310" 46 | exclude = ["alembic"] 47 | 48 | [tool.ruff.lint] 49 | select = [ 50 | "E", # pycodestyle errors 51 | "W", # pycodestyle warnings 52 | "F", # pyflakes 53 | "I", # isort 54 | "B", # flake8-bugbear 55 | "C4", # flake8-comprehensions 56 | "UP", # pyupgrade 57 | "ARG001", # unused arguments in functions 58 | ] 59 | ignore = [ 60 | "E501", # line too long, handled by black 61 | "B008", # do not perform function calls in argument defaults 62 | "W191", # indentation contains tabs 63 | "B904", # Allow raising exceptions without from e, for HTTPException 64 | ] 65 | 66 | [tool.ruff.lint.pyupgrade] 67 | # Preserve types, even if a file imports `from __future__ import annotations`. 
68 | keep-runtime-typing = true 69 | -------------------------------------------------------------------------------- /tests/assets/command_programmatically/_tests.py: -------------------------------------------------------------------------------- 1 | import re 2 | from pathlib import Path 3 | 4 | import pytest 5 | from models import Foo 6 | from settings import TORTOISE_ORM 7 | 8 | from aerich import Command, TortoiseContext 9 | from aerich.exceptions import NotInitedError 10 | 11 | 12 | @pytest.fixture(scope="session") 13 | def anyio_backend() -> str: 14 | return "asyncio" 15 | 16 | 17 | @pytest.fixture 18 | async def init_connections(): 19 | async with TortoiseContext(TORTOISE_ORM): 20 | yield 21 | 22 | 23 | @pytest.mark.anyio 24 | async def test_command_not_inited(): 25 | command = Command(TORTOISE_ORM) 26 | message = "You have to call .init() first before migrate" 27 | with pytest.raises(NotInitedError, match=re.escape(message)): 28 | await command.migrate() 29 | 30 | 31 | @pytest.mark.anyio 32 | async def test_init_command_by_await(): 33 | command = await Command(TORTOISE_ORM) 34 | if not list(Path("migrations/models").glob("*.py")): 35 | await command.init_db(safe=True) 36 | await command.migrate() 37 | await command.upgrade() 38 | await command.close() 39 | 40 | 41 | @pytest.mark.anyio 42 | async def test_init_command_by_async_with(): 43 | async with Command(TORTOISE_ORM) as command: 44 | if not list(Path("migrations/models").glob("*.py")): 45 | await command.init_db(safe=True) 46 | await command.migrate() 47 | await command.upgrade() 48 | 49 | 50 | @pytest.mark.anyio 51 | async def test_init_command_by_init_func(): 52 | command = Command(TORTOISE_ORM) 53 | await command.init() 54 | if not list(Path("migrations/models").glob("*.py")): 55 | await command.init_db(safe=True) 56 | await command.migrate() 57 | await command.upgrade() 58 | await command.close() 59 | 60 | 61 | @pytest.mark.anyio 62 | async def test_migrate_upgrade(init_connections): 63 | async with Command(TORTOISE_ORM) as command: 64 | await command.migrate() 65 | await command.upgrade() 66 | assert list(Path("migrations/models").glob("1_*.py")) 67 | await Foo.create(a=1, b=2) 68 | -------------------------------------------------------------------------------- /tests/assets/migrate_no_input/_tests.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | 5 | import pytest 6 | from asyncclick.testing import CliRunner 7 | 8 | from aerich.cli import cli 9 | from aerich.migrate import Migrate 10 | 11 | 12 | @pytest.fixture(scope="session") 13 | def anyio_backend() -> str: 14 | return "asyncio" 15 | 16 | 17 | @pytest.mark.anyio 18 | async def test_migrate(): 19 | runner = CliRunner() 20 | # Default to abort without deleting previous generated migration files 21 | result = await runner.invoke(cli, ["migrate"], input="\n") 22 | assert not result.exception 23 | assert "it" in result.output 24 | warning_msg = ( 25 | "Aborted! You may need to run `aerich heads` to list avaliable unapplied migrations." 
26 | ) 27 | assert warning_msg in result.output 28 | migrate_dir = Path(Migrate.migrate_location) 29 | extra_migration_file = migrate_dir.joinpath("1_datetime_update.py") 30 | extra_migration_file.touch() 31 | pre_migration_files = list(migrate_dir.glob("1_*.py")) 32 | updated_at_0 = pre_migration_files[0].stat().st_mtime 33 | # Delete migration files that with same version num when explicit input True 34 | result = await runner.invoke(cli, ["migrate"], input="True\n") 35 | assert not result.exception 36 | assert "them" in result.output 37 | assert all(i.name in result.output for i in pre_migration_files) 38 | assert not extra_migration_file.exists() 39 | new_migration_files = list(migrate_dir.glob("1_*.py")) 40 | assert len(new_migration_files) == 1 41 | updated_at = new_migration_files[0].stat().st_mtime 42 | assert updated_at > updated_at_0 43 | # Delete migration files without ask for prompt when --no-input passed 44 | result = await runner.invoke(cli, ["migrate", "--no-input"]) 45 | assert not result.exception 46 | assert "them" not in result.output and "it" not in result.output 47 | latest_migration_files = list(migrate_dir.glob("1_*.py")) 48 | assert len(latest_migration_files) == 1 49 | updated_at_2 = latest_migration_files[0].stat().st_mtime 50 | assert updated_at_2 > updated_at 51 | -------------------------------------------------------------------------------- /tests/test_inspectdb.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from pathlib import Path 3 | 4 | import pytest 5 | 6 | from tests._utils import ( 7 | Dialect, 8 | prepare_py_files, 9 | requires_dialect, 10 | requires_env, 11 | run_in_subprocess, 12 | skip_dialect, 13 | tmp_daily_db, 14 | ) 15 | 16 | 17 | # TODO: remove skip decorator to test sqlite after #384 fixed 18 | @skip_dialect("sqlite") 19 | def test_inspect(tmp_work_dir): 20 | prepare_py_files("fake", with_testing_models=True) 21 | with tmp_daily_db(): 22 | _test_inspect() 23 | 24 | 25 | def _test_inspect() -> None: 26 | ok, out = run_in_subprocess("aerich init -t settings.TORTOISE_ORM") 27 | if not ok: 28 | print("Failed to init:", out) 29 | ok, out = run_in_subprocess("aerich init-db") 30 | if not ok: 31 | print("ERROR init-db:", out) 32 | ok, ret = run_in_subprocess("aerich inspectdb -t product") 33 | assert ok, ret 34 | assert ret.startswith("from tortoise import Model, fields") 35 | assert "primary_key=True" in ret 36 | assert "fields.DatetimeField" in ret 37 | assert "fields.FloatField" in ret 38 | assert "fields.UUIDField" in ret 39 | if Dialect.is_mysql(): 40 | assert "db_index=True" in ret 41 | 42 | 43 | @requires_dialect("postgres") 44 | @pytest.mark.skipif( 45 | sys.version_info < (3, 10), reason="tortoise-vector requires python3.10 or higher" 46 | ) 47 | @requires_env("AERICH_TEST_VECTOR") 48 | def test_inspect_vector(tmp_work_dir: Path): 49 | prepare_py_files("postgres_vector", suffix=".*") 50 | with tmp_daily_db(): 51 | ok, out = run_in_subprocess("aerich init-db --pre='CREATE EXTENSION IF NOT EXISTS vector'") 52 | if not ok: 53 | print("ERROR init-db:", out) 54 | ok, ret = run_in_subprocess("aerich inspectdb -t foo") 55 | assert ok, ret 56 | expected = """ 57 | from tortoise import Model, fields 58 | from tortoise.contrib.postgres.fields import TSVectorField 59 | from tortoise_vector.field import VectorField 60 | 61 | class Foo(Model): 62 | id = fields.IntField(primary_key=True) 63 | a = fields.IntField() 64 | b = TSVectorField() 65 | c = VectorField() 66 | """ 67 | assert 
expected.strip() in ret 68 | -------------------------------------------------------------------------------- /tests/assets/fake/_tests.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from models import NewModel 3 | from models_second import Config 4 | from tortoise.exceptions import OperationalError 5 | 6 | try: 7 | # This error does not translate to tortoise's OperationalError 8 | from psycopg.errors import UndefinedColumn 9 | except ImportError: 10 | errors = (OperationalError,) 11 | else: 12 | errors = (OperationalError, UndefinedColumn) 13 | 14 | 15 | @pytest.mark.anyio 16 | async def test_init_db(): 17 | m1 = await NewModel.filter(name="") 18 | assert isinstance(m1, list) 19 | m2 = await Config.filter(key="") 20 | assert isinstance(m2, list) 21 | await NewModel.create(name="") 22 | await Config.create(key="", label="", value={}) 23 | 24 | 25 | @pytest.mark.anyio 26 | async def test_fake_field_1(): 27 | assert "field_1" in NewModel._meta.fields_map 28 | assert "field_1" in Config._meta.fields_map 29 | with pytest.raises(errors): 30 | await NewModel.create(name="", field_1=1) 31 | with pytest.raises(errors): 32 | await Config.create(key="", label="", value={}, field_1=1) 33 | 34 | obj1 = NewModel(name="", field_1=1) 35 | with pytest.raises(errors): 36 | await obj1.save() 37 | obj1 = NewModel(name="") 38 | with pytest.raises(errors): 39 | await obj1.save() 40 | with pytest.raises(errors): 41 | obj1 = await NewModel.first() 42 | obj1 = await NewModel.all().first().values("id", "name") 43 | assert obj1 and obj1["id"] 44 | 45 | obj2 = Config(key="", label="", value={}, field_1=1) 46 | with pytest.raises(errors): 47 | await obj2.save() 48 | obj2 = Config(key="", label="", value={}) 49 | with pytest.raises(errors): 50 | await obj2.save() 51 | with pytest.raises(errors): 52 | obj2 = await Config.first() 53 | obj2 = await Config.all().first().values("id", "key") 54 | assert obj2 and obj2["id"] 55 | 56 | 57 | @pytest.mark.anyio 58 | async def test_fake_field_2(): 59 | assert "field_2" in NewModel._meta.fields_map 60 | assert "field_2" in Config._meta.fields_map 61 | with pytest.raises(errors): 62 | await NewModel.create(name="") 63 | with pytest.raises(errors): 64 | await Config.create(key="", label="", value={}) 65 | -------------------------------------------------------------------------------- /tests/test_remove_unique_constraint.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import functools 4 | import shutil 5 | from pathlib import Path 6 | 7 | from tests._utils import prepare_py_files, run_shell, skip_dialect, tmp_daily_db 8 | 9 | 10 | def update_model(from_file: str, parent: Path) -> None: 11 | abspath = parent / from_file 12 | shutil.copy(abspath, "models.py") 13 | 14 | 15 | # TODO: remove skip decorator to test sqlite if alter-column supported 16 | @skip_dialect("sqlite") 17 | def test_remove_unique_constraint(tmp_work_dir): 18 | asset_dir = prepare_py_files("remove_constraint") 19 | with tmp_daily_db(): 20 | _test_remove_unique_constraint(asset_dir) 21 | 22 | 23 | def _test_remove_unique_constraint(asset_dir: Path) -> None: 24 | _update_model = functools.partial(update_model, parent=asset_dir) 25 | output = run_shell("aerich init -t settings.TORTOISE_ORM") 26 | assert "Success" in output 27 | output = run_shell("aerich init-db") 28 | assert "Success" in output 29 | output = run_shell("pytest _tests.py::test_init_db") 30 | assert "error" not 
in output.lower() 31 | _update_model("models_2.py") 32 | output = run_shell("aerich migrate") 33 | assert "Success" in output 34 | output = run_shell("aerich upgrade") 35 | assert "Success" in output 36 | output = run_shell("pytest _tests.py::test_models_2") 37 | assert "error" not in output.lower() 38 | _update_model("models_3.py") 39 | output = run_shell("aerich migrate") 40 | assert "Success" in output 41 | output = run_shell("aerich upgrade") 42 | assert "Success" in output 43 | output = run_shell("pytest _tests.py::test_models_3") 44 | assert "error" not in output.lower() 45 | _update_model("models_4.py") 46 | output = run_shell("aerich migrate") 47 | assert "Success" in output 48 | output = run_shell("aerich upgrade") 49 | assert "Success" in output 50 | output = run_shell("pytest _tests.py::test_models_4") 51 | assert "error" not in output.lower() 52 | _update_model("models_5.py") 53 | output = run_shell("aerich migrate --no-input") 54 | assert "Success" in output 55 | output = run_shell("aerich upgrade") 56 | assert "Success" in output 57 | output = run_shell("pytest _tests.py::test_models_5") 58 | assert "error" not in output.lower() 59 | -------------------------------------------------------------------------------- /tests/models_second.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from enum import IntEnum 3 | 4 | from tortoise import Model, fields 5 | 6 | 7 | class ProductType(IntEnum): 8 | article = 1 9 | page = 2 10 | 11 | 12 | class PermissionAction(IntEnum): 13 | create = 1 14 | delete = 2 15 | update = 3 16 | read = 4 17 | 18 | 19 | class Status(IntEnum): 20 | on = 1 21 | off = 0 22 | 23 | 24 | class User(Model): 25 | username = fields.CharField(max_length=20, unique=True) 26 | password = fields.CharField(max_length=200) 27 | last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now) 28 | is_active = fields.BooleanField(default=True, description="Is Active") 29 | is_superuser = fields.BooleanField(default=False, description="Is SuperUser") 30 | avatar = fields.CharField(max_length=200, default="") 31 | intro = fields.TextField(default="") 32 | 33 | 34 | class Email(Model): 35 | email = fields.CharField(max_length=200) 36 | is_primary = fields.BooleanField(default=False) 37 | user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField( 38 | "models_second.User", db_constraint=False 39 | ) 40 | 41 | 42 | class Category(Model): 43 | slug = fields.CharField(max_length=200) 44 | name = fields.CharField(max_length=200) 45 | user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField( 46 | "models_second.User", description="User" 47 | ) 48 | created_at = fields.DatetimeField(auto_now_add=True) 49 | 50 | 51 | class Product(Model): 52 | categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField( 53 | "models_second.Category" 54 | ) 55 | name = fields.CharField(max_length=50) 56 | view_num = fields.IntField(description="View Num") 57 | sort = fields.IntField() 58 | is_reviewed = fields.BooleanField(description="Is Reviewed") 59 | type: int = fields.IntEnumField( 60 | ProductType, description="Product Type", source_field="type_db_alias" 61 | ) 62 | image = fields.CharField(max_length=200) 63 | body = fields.TextField() 64 | created_at = fields.DatetimeField(auto_now_add=True) 65 | 66 | 67 | class Config(Model): 68 | label = fields.CharField(max_length=200) 69 | key = fields.CharField(max_length=20) 70 | value: dict = fields.JSONField() 71 | status: Status = 
fields.IntEnumField(Status, default=Status.on) 72 | -------------------------------------------------------------------------------- /tests/assets/sqlite_migrate/_tests.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | import pytest 4 | from models import Foo 5 | from tortoise.exceptions import IntegrityError 6 | 7 | 8 | @pytest.mark.anyio 9 | async def test_allow_duplicate() -> None: 10 | await Foo.all().delete() 11 | await Foo.create(name="foo") 12 | obj = await Foo.create(name="foo") 13 | assert (await Foo.all().count()) == 2 14 | await obj.delete() 15 | 16 | 17 | @pytest.mark.anyio 18 | async def test_unique_is_true() -> None: 19 | with pytest.raises(IntegrityError): 20 | await Foo.create(name="foo") 21 | await Foo.create(name="foo") 22 | 23 | 24 | @pytest.mark.anyio 25 | async def test_add_unique_field() -> None: 26 | if not await Foo.filter(age=0).exists(): 27 | await Foo.create(name="0_" + uuid.uuid4().hex, age=0) 28 | with pytest.raises(IntegrityError): 29 | await Foo.create(name=uuid.uuid4().hex, age=0) 30 | 31 | 32 | @pytest.mark.anyio 33 | async def test_drop_unique_field() -> None: 34 | name = "1_" + uuid.uuid4().hex 35 | await Foo.create(name=name, age=0) 36 | assert await Foo.filter(name=name).exists() 37 | 38 | 39 | @pytest.mark.anyio 40 | async def test_with_age_field() -> None: 41 | name = "2_" + uuid.uuid4().hex 42 | await Foo.create(name=name, age=0) 43 | obj = await Foo.get(name=name) 44 | assert obj.age == 0 45 | 46 | 47 | @pytest.mark.anyio 48 | async def test_without_age_field() -> None: 49 | name = "3_" + uuid.uuid4().hex 50 | await Foo.create(name=name, age=0) 51 | obj = await Foo.get(name=name) 52 | assert getattr(obj, "age", None) is None 53 | 54 | 55 | @pytest.mark.anyio 56 | async def test_m2m_with_custom_through() -> None: 57 | from models import FooGroup, Group 58 | 59 | name = "4_" + uuid.uuid4().hex 60 | foo = await Foo.create(name=name) 61 | group = await Group.create(name=name + "1") 62 | await FooGroup.all().delete() 63 | await foo.groups.add(group) 64 | foo_group = await FooGroup.get(foo=foo, group=group) 65 | assert not foo_group.is_active 66 | 67 | 68 | @pytest.mark.anyio 69 | async def test_add_m2m_field_after_init_db() -> None: 70 | from models import Group 71 | 72 | name = "5_" + uuid.uuid4().hex 73 | foo = await Foo.create(name=name) 74 | group = await Group.create(name=name + "1") 75 | await foo.groups.add(group) 76 | assert (await group.users.all().first()) == foo 77 | -------------------------------------------------------------------------------- /tests/assets/sqlite_old_style/_tests.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | import pytest 4 | from models import Foo 5 | from tortoise.exceptions import IntegrityError 6 | 7 | 8 | @pytest.mark.anyio 9 | async def test_allow_duplicate() -> None: 10 | await Foo.all().delete() 11 | await Foo.create(name="foo") 12 | obj = await Foo.create(name="foo") 13 | assert (await Foo.all().count()) == 2 14 | await obj.delete() 15 | 16 | 17 | @pytest.mark.anyio 18 | async def test_unique_is_true() -> None: 19 | with pytest.raises(IntegrityError): 20 | await Foo.create(name="foo") 21 | await Foo.create(name="foo") 22 | 23 | 24 | @pytest.mark.anyio 25 | async def test_add_unique_field() -> None: 26 | if not await Foo.filter(age=0).exists(): 27 | await Foo.create(name="0_" + uuid.uuid4().hex, age=0) 28 | with pytest.raises(IntegrityError): 29 | await Foo.create(name=uuid.uuid4().hex, age=0) 
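# Editor's note: the tests in this module run at successive points of a sqlite migration scenario
# that is driven by the corresponding test module; the name prefixes ("0_", "1_", ...) combined
# with uuid hex keep rows created in earlier steps from tripping uniqueness added in later steps.
# A rough sketch of the model shape that the m2m tests below appear to assume (field names are
# inferred from the assertions; the actual asset models.py is not shown in this excerpt, so treat
# this purely as an illustrative assumption):
#
#     class Foo(Model):
#         name = fields.CharField(max_length=255)
#         groups = fields.ManyToManyField("models.Group", through="foo_group", related_name="users")
#
#     class FooGroup(Model):
#         foo = fields.ForeignKeyField("models.Foo")
#         group = fields.ForeignKeyField("models.Group")
#         is_active = fields.BooleanField(default=False)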
30 | 31 | 32 | @pytest.mark.anyio 33 | async def test_drop_unique_field() -> None: 34 | name = "1_" + uuid.uuid4().hex 35 | await Foo.create(name=name, age=0) 36 | assert await Foo.filter(name=name).exists() 37 | 38 | 39 | @pytest.mark.anyio 40 | async def test_with_age_field() -> None: 41 | name = "2_" + uuid.uuid4().hex 42 | await Foo.create(name=name, age=0) 43 | obj = await Foo.get(name=name) 44 | assert obj.age == 0 45 | 46 | 47 | @pytest.mark.anyio 48 | async def test_without_age_field() -> None: 49 | name = "3_" + uuid.uuid4().hex 50 | await Foo.create(name=name, age=0) 51 | obj = await Foo.get(name=name) 52 | assert getattr(obj, "age", None) is None 53 | 54 | 55 | @pytest.mark.anyio 56 | async def test_m2m_with_custom_through() -> None: 57 | from models import FooGroup, Group 58 | 59 | name = "4_" + uuid.uuid4().hex 60 | foo = await Foo.create(name=name) 61 | group = await Group.create(name=name + "1") 62 | await FooGroup.all().delete() 63 | await foo.groups.add(group) 64 | foo_group = await FooGroup.get(foo=foo, group=group) 65 | assert not foo_group.is_active 66 | 67 | 68 | @pytest.mark.anyio 69 | async def test_add_m2m_field_after_init_db() -> None: 70 | from models import Group 71 | 72 | name = "5_" + uuid.uuid4().hex 73 | foo = await Foo.create(name=name) 74 | group = await Group.create(name=name + "1") 75 | await foo.groups.add(group) 76 | assert (await group.users.all().first()) == foo 77 | -------------------------------------------------------------------------------- /aerich/inspectdb/sqlite.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from aerich.inspectdb import Column, FieldMapDict, Inspect 4 | 5 | 6 | class InspectSQLite(Inspect): 7 | @property 8 | def field_map(self) -> FieldMapDict: 9 | return { 10 | "INTEGER": self.int_field, 11 | "INT": self.bool_field, 12 | "SMALLINT": self.smallint_field, 13 | "VARCHAR": self.char_field, 14 | "TEXT": self.text_field, 15 | "TIMESTAMP": self.datetime_field, 16 | "REAL": self.float_field, 17 | "BIGINT": self.bigint_field, 18 | "DATE": self.date_field, 19 | "TIME": self.time_field, 20 | "JSON": self.json_field, 21 | "BLOB": self.binary_field, 22 | } 23 | 24 | async def get_columns(self, table: str) -> list[Column]: 25 | columns = [] 26 | sql = f"PRAGMA table_info({table})" 27 | ret = await self.conn.execute_query_dict(sql) 28 | columns_index = await self._get_columns_index(table) 29 | for row in ret: 30 | try: 31 | length = row["type"].split("(")[1].split(")")[0] 32 | except IndexError: 33 | length = None 34 | columns.append( 35 | Column( 36 | name=row["name"], 37 | data_type=row["type"].split("(")[0], 38 | null=row["notnull"] == 0, 39 | default=row["dflt_value"], 40 | length=length, 41 | pk=row["pk"] == 1, 42 | unique=columns_index.get(row["name"]) == "unique", 43 | index=columns_index.get(row["name"]) == "index", 44 | ) 45 | ) 46 | return columns 47 | 48 | async def _get_columns_index(self, table: str) -> dict[str, str]: 49 | sql = f"PRAGMA index_list ({table})" 50 | indexes = await self.conn.execute_query_dict(sql) 51 | ret = {} 52 | for index in indexes: 53 | sql = f"PRAGMA index_info({index['name']})" 54 | index_info = (await self.conn.execute_query_dict(sql))[0] 55 | ret[index_info["name"]] = "unique" if index["unique"] else "index" 56 | return ret 57 | 58 | async def get_all_tables(self) -> list[str]: 59 | sql = "select tbl_name from sqlite_master where type='table' and name!='sqlite_sequence'" 60 | ret = await 
self.conn.execute_query_dict(sql) 61 | return list(map(lambda x: x["tbl_name"], ret)) 62 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | from collections.abc import Generator 5 | from pathlib import Path 6 | 7 | import pytest 8 | from tortoise import Tortoise, expand_db_url 9 | from tortoise.backends.base_postgres.schema_generator import BasePostgresSchemaGenerator 10 | from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator 11 | from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator 12 | from tortoise.contrib.test import MEMORY_SQLITE 13 | 14 | from aerich.ddl.mysql import MysqlDDL 15 | from aerich.ddl.postgres import PostgresDDL 16 | from aerich.ddl.sqlite import SqliteDDL 17 | from aerich.migrate import Migrate 18 | from tests._utils import chdir, init_db 19 | 20 | db_url = os.getenv("TEST_DB", MEMORY_SQLITE) 21 | db_url_second = os.getenv("TEST_DB_SECOND", MEMORY_SQLITE) 22 | try: 23 | default_db = expand_db_url(db_url, testing=True) 24 | except KeyError as e: 25 | if str(e) == "'/'": 26 | # Auto convert invalid path for Windows 27 | db_url = db_url.replace("/{/}", "{}") 28 | default_db = expand_db_url(db_url, testing=True) 29 | else: 30 | raise e 31 | 32 | tortoise_orm = { 33 | "connections": { 34 | "default": default_db, 35 | "second": expand_db_url(db_url_second, testing=True), 36 | }, 37 | "apps": { 38 | "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"}, 39 | "models_second": {"models": ["tests.models_second"], "default_connection": "second"}, 40 | }, 41 | } 42 | TEST_DIR = Path(__file__).parent / "tests" 43 | 44 | 45 | @pytest.fixture(scope="function", autouse=True) 46 | def reset_migrate() -> None: 47 | Migrate.upgrade_operators = [] 48 | Migrate.downgrade_operators = [] 49 | Migrate._upgrade_fk_m2m_index_operators = [] 50 | Migrate._downgrade_fk_m2m_index_operators = [] 51 | Migrate._upgrade_m2m = [] 52 | Migrate._downgrade_m2m = [] 53 | 54 | 55 | @pytest.fixture(scope="session") 56 | def anyio_backend() -> str: 57 | return "asyncio" 58 | 59 | 60 | @pytest.fixture(scope="session", autouse=True) 61 | async def initialize_tests(anyio_backend): 62 | await init_db(tortoise_orm) 63 | client = Tortoise.get_connection("default") 64 | if client.schema_generator is MySQLSchemaGenerator: 65 | Migrate.ddl = MysqlDDL(client) 66 | elif client.schema_generator is SqliteSchemaGenerator: 67 | Migrate.ddl = SqliteDDL(client) 68 | elif issubclass(client.schema_generator, BasePostgresSchemaGenerator): 69 | Migrate.ddl = PostgresDDL(client) 70 | Migrate.dialect = Migrate.ddl.DIALECT 71 | try: 72 | yield 73 | finally: 74 | await Tortoise._drop_databases() 75 | 76 | 77 | @pytest.fixture 78 | def tmp_work_dir(tmp_path: Path) -> Generator[Path]: 79 | with chdir(tmp_path): 80 | yield tmp_path 81 | -------------------------------------------------------------------------------- /aerich/inspectdb/mysql.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from aerich.inspectdb import Column, FieldMapDict, Inspect 4 | 5 | 6 | class InspectMySQL(Inspect): 7 | @property 8 | def field_map(self) -> FieldMapDict: 9 | return { 10 | "int": self.int_field, 11 | "smallint": self.smallint_field, 12 | "tinyint": self.bool_field, 13 | "bigint": self.bigint_field, 14 | 
"varchar": self.char_field, 15 | "char": self.uuid_field, 16 | "longtext": self.text_field, 17 | "text": self.text_field, 18 | "datetime": self.datetime_field, 19 | "float": self.float_field, 20 | "double": self.float_field, 21 | "date": self.date_field, 22 | "time": self.time_field, 23 | "decimal": self.decimal_field, 24 | "json": self.json_field, 25 | "longblob": self.binary_field, 26 | } 27 | 28 | async def get_all_tables(self) -> list[str]: 29 | sql = "select TABLE_NAME from information_schema.TABLES where TABLE_SCHEMA=%s" 30 | ret = await self.conn.execute_query_dict(sql, [self.database]) 31 | return list(map(lambda x: x["TABLE_NAME"], ret)) 32 | 33 | async def get_columns(self, table: str) -> list[Column]: 34 | columns = [] 35 | sql = """select c.*, s.NON_UNIQUE, s.INDEX_NAME 36 | from information_schema.COLUMNS c 37 | left join information_schema.STATISTICS s on c.TABLE_NAME = s.TABLE_NAME 38 | and c.TABLE_SCHEMA = s.TABLE_SCHEMA 39 | and c.COLUMN_NAME = s.COLUMN_NAME 40 | where c.TABLE_SCHEMA = %s 41 | and c.TABLE_NAME = %s""" 42 | ret = await self.conn.execute_query_dict(sql, [self.database, table]) 43 | for row in ret: 44 | unique = index = False 45 | if (non_unique := row["NON_UNIQUE"]) is not None: 46 | unique = not non_unique 47 | elif row["COLUMN_KEY"] == "UNI": 48 | unique = True 49 | if (index_name := row["INDEX_NAME"]) is not None: 50 | index = index_name != "PRIMARY" 51 | columns.append( 52 | Column( 53 | name=row["COLUMN_NAME"], 54 | data_type=row["DATA_TYPE"], 55 | null=row["IS_NULLABLE"] == "YES", 56 | default=row["COLUMN_DEFAULT"], 57 | pk=row["COLUMN_KEY"] == "PRI", 58 | comment=row["COLUMN_COMMENT"], 59 | unique=unique, 60 | extra=row["EXTRA"], 61 | index=index, 62 | length=row["CHARACTER_MAXIMUM_LENGTH"], 63 | max_digits=row["NUMERIC_PRECISION"], 64 | decimal_places=row["NUMERIC_SCALE"], 65 | ) 66 | ) 67 | return columns 68 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### Python template 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | wheels/ 25 | pip-wheel-metadata/ 26 | share/python-wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | MANIFEST 31 | 32 | # PyInstaller 33 | # Usually these files are written by a python script from a template 34 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
35 | *.manifest 36 | *.spec 37 | 38 | # Installer logs 39 | pip-log.txt 40 | pip-delete-this-directory.txt 41 | 42 | # Unit test / coverage reports 43 | htmlcov/ 44 | .tox/ 45 | .nox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | *.py,cover 53 | .hypothesis/ 54 | .pytest_cache/ 55 | cover/ 56 | 57 | # Translations 58 | *.mo 59 | *.pot 60 | 61 | # Django stuff: 62 | *.log 63 | local_settings.py 64 | db.sqlite3 65 | db.sqlite3-journal 66 | 67 | # Flask stuff: 68 | instance/ 69 | .webassets-cache 70 | 71 | # Scrapy stuff: 72 | .scrapy 73 | 74 | # Sphinx documentation 75 | docs/_build/ 76 | 77 | # PyBuilder 78 | .pybuilder/ 79 | target/ 80 | 81 | # Jupyter Notebook 82 | .ipynb_checkpoints 83 | 84 | # IPython 85 | profile_default/ 86 | ipython_config.py 87 | 88 | # pyenv 89 | # For a library or package, you might want to ignore these files since the code is 90 | # intended to run in multiple environments; otherwise, check them in: 91 | # .python-version 92 | 93 | # pipenv 94 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 95 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 96 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 97 | # install all needed dependencies. 98 | #Pipfile.lock 99 | 100 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 101 | __pypackages__/ 102 | 103 | # Celery stuff 104 | celerybeat-schedule 105 | celerybeat.pid 106 | 107 | # SageMath parsed files 108 | *.sage.py 109 | 110 | # Environments 111 | .env 112 | .venv 113 | env/ 114 | venv/ 115 | ENV/ 116 | env.bak/ 117 | venv.bak/ 118 | 119 | # Spyder project settings 120 | .spyderproject 121 | .spyproject 122 | 123 | # Rope project settings 124 | .ropeproject 125 | 126 | # mkdocs documentation 127 | /site 128 | 129 | # mypy 130 | .mypy_cache/ 131 | .dmypy.json 132 | dmypy.json 133 | 134 | # Pyre type checker 135 | .pyre/ 136 | 137 | # pytype static type analyzer 138 | .pytype/ 139 | 140 | # Cython debug symbols 141 | cython_debug/ 142 | 143 | .idea 144 | migrations 145 | aerich.ini 146 | src 147 | .vscode 148 | .DS_Store 149 | .python-version -------------------------------------------------------------------------------- /tests/test_per_app_migrations.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | from pathlib import Path 3 | 4 | from tests._utils import ASSETS, copy_asset, requires_dialect, run_shell 5 | 6 | 7 | @requires_dialect("sqlite") 8 | def test_single_app(tmp_work_dir): 9 | shutil.copy(ASSETS / "settings.py", ".") 10 | run_shell("aerich init -t settings.TORTOISE_ORM --location './{app}/migrations'") 11 | models_dir = Path("models") 12 | models_dir.exists() or models_dir.mkdir() 13 | models_py = models_dir.joinpath("__init__.py") 14 | models_py.write_text("""from tortoise import Model, fields 15 | class Foo(Model): 16 | name = fields.CharField(20)""") 17 | run_shell("aerich init-db") 18 | assert not Path("migrations").exists() 19 | migrations_dir = Path("models/migrations/") 20 | assert migrations_dir.exists(), list(models_dir.glob("*")) + list(Path().glob("*")) 21 | assert list(migrations_dir.glob("0_*.py")) 22 | with models_py.open("a") as f: 23 | f.write("\n age = fields.IntField()") 24 | run_shell("aerich migrate") 25 | assert list(migrations_dir.glob("1_*.py")) 26 | out = run_shell("aerich upgrade") 27 | assert "success" in out.lower() 28 | 29 | 30 | 
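# Editor's note: `--location './{app}/migrations'` makes aerich substitute `{app}` with each app
# key of the TORTOISE_ORM config, so every app keeps its migration files inside its own package
# (hence the `models/migrations`, `auth/migrations` and `polls/migrations` assertions in these
# tests). A minimal sketch of the kind of config the multi-app test below relies on; the app names
# "auth" and "polls" come from tests/assets/per_app_migrations, but the exact content of that
# asset's settings.py is not shown in this excerpt, so this constant is illustrative only and is
# not used by the tests:
_EXAMPLE_PER_APP_TORTOISE_ORM = {
    "connections": {"default": "sqlite://db.sqlite3"},
    "apps": {
        "auth": {"models": ["auth.models", "aerich.models"], "default_connection": "default"},
        "polls": {"models": ["polls.models"], "default_connection": "default"},
    },
}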
@requires_dialect("sqlite") 31 | def test_multi_apps(tmp_work_dir): 32 | copy_asset("per_app_migrations") 33 | toml_file = Path("pyproject.toml") 34 | text = toml_file.read_text(encoding="utf-8") 35 | run_shell("aerich init -t settings.TORTOISE_ORM --location './{app}/migrations'") 36 | assert not Path("migrations").exists() 37 | assert Path("auth/migrations").exists(), list(Path().glob("*")) + list(Path("auth").glob("*")) 38 | assert Path("polls/migrations").exists(), list(Path().glob("*")) + list(Path("polls").glob("*")) 39 | assert text == toml_file.read_text(encoding="utf-8") 40 | 41 | output = run_shell("aerich --app auth init-db") 42 | assert "error" not in output.lower() 43 | assert list(Path("auth/migrations/").glob("0_*.py")), list(Path("auth/migrations").glob("*")) 44 | output = run_shell("aerich --app polls init-db") 45 | assert "error" not in output.lower() 46 | assert list(Path("polls/migrations/").glob("0_*.py")), list(Path("polls/migrations").glob("*")) 47 | output = run_shell("pytest -s _tests.py::test_1") 48 | 49 | assert "error" not in output.lower() 50 | shutil.move("models_2.py", "auth/models.py") 51 | output = run_shell("aerich --app auth migrate") 52 | assert "error" not in output.lower() 53 | output = run_shell("aerich --app auth upgrade") 54 | assert "error" not in output.lower() 55 | output = run_shell("pytest -s _tests.py::test_2") 56 | assert "error" not in output.lower() 57 | 58 | shutil.move("models_3.py", "polls/models.py") 59 | output = run_shell("aerich --app polls migrate") 60 | assert "error" not in output.lower() 61 | output = run_shell("aerich --app polls upgrade") 62 | assert "error" not in output.lower() 63 | output = run_shell("pytest -s _tests.py::test_3") 64 | assert "error" not in output.lower() 65 | -------------------------------------------------------------------------------- /aerich/ddl/mysql/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import TYPE_CHECKING 4 | 5 | from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator 6 | 7 | from aerich.ddl import BaseDDL 8 | 9 | if TYPE_CHECKING: 10 | from tortoise import Model 11 | 12 | 13 | class MysqlDDL(BaseDDL): 14 | schema_generator_cls = MySQLSchemaGenerator 15 | DIALECT = MySQLSchemaGenerator.DIALECT 16 | _DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS `{table_name}`" 17 | _ADD_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` ADD {column}" 18 | _ALTER_DEFAULT_TEMPLATE = "ALTER TABLE `{table_name}` ALTER COLUMN `{column}` {default}" 19 | _CHANGE_COLUMN_TEMPLATE = ( 20 | "ALTER TABLE `{table_name}` CHANGE {old_column_name} {new_column_name} {new_column_type}" 21 | ) 22 | _DROP_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` DROP COLUMN `{column_name}`" 23 | _RENAME_COLUMN_TEMPLATE = ( 24 | "ALTER TABLE `{table_name}` RENAME COLUMN `{old_column_name}` TO `{new_column_name}`" 25 | ) 26 | _ADD_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` ADD {index_type}{unique}INDEX `{index_name}` ({column_names}){extra}" 27 | _DROP_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` DROP INDEX `{index_name}`" 28 | _ADD_INDEXED_UNIQUE_TEMPLATE = ( 29 | "ALTER TABLE `{table_name}` DROP INDEX `{index_name}`, ADD UNIQUE (`{column_name}`)" 30 | ) 31 | _DROP_INDEXED_UNIQUE_TEMPLATE = "ALTER TABLE `{table_name}` DROP INDEX `{column_name}`, ADD INDEX `{index_name}` (`{column_name}`)" 32 | _ADD_FK_TEMPLATE = "ALTER TABLE `{table_name}` ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` 
(`{field}`) ON DELETE {on_delete}" 33 | _DROP_FK_TEMPLATE = "ALTER TABLE `{table_name}` DROP FOREIGN KEY `{fk_name}`" 34 | _M2M_TABLE_TEMPLATE = ( 35 | "CREATE TABLE `{table_name}` (\n" 36 | " `{backward_key}` {backward_type} NOT NULL REFERENCES `{backward_table}` (`{backward_field}`) ON DELETE CASCADE,\n" 37 | " `{forward_key}` {forward_type} NOT NULL REFERENCES `{forward_table}` (`{forward_field}`) ON DELETE CASCADE\n" 38 | "){extra}{comment}" 39 | ) 40 | _MODIFY_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` MODIFY COLUMN {column}" 41 | _RENAME_TABLE_TEMPLATE = "ALTER TABLE `{old_table_name}` RENAME TO `{new_table_name}`" 42 | 43 | def _index_name(self, unique: bool | None, model: type[Model], field_names: list[str]) -> str: 44 | if unique and len(field_names) == 1: 45 | # Example: `email = CharField(max_length=50, unique=True)` 46 | # Generate schema: `"email" VARCHAR(10) NOT NULL UNIQUE` 47 | # Unique index key is the same as field name: `email` 48 | return field_names[0] 49 | return super()._index_name(unique, model, field_names) 50 | 51 | def alter_indexed_column_unique( 52 | self, model: type[Model], field_name: str, drop: bool = False 53 | ) -> list[str]: 54 | # if drop is false: Drop index and add unique 55 | # else: Drop unique index and add normal index 56 | template = self._DROP_INDEXED_UNIQUE_TEMPLATE if drop else self._ADD_INDEXED_UNIQUE_TEMPLATE 57 | table = self.get_table_name(model) 58 | index = self._index_name(unique=False, model=model, field_names=[field_name]) 59 | return [template.format(table_name=table, index_name=index, column_name=field_name)] 60 | -------------------------------------------------------------------------------- /aerich/ddl/postgres/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import cast 4 | 5 | from tortoise import Model 6 | from tortoise.backends.base_postgres.schema_generator import BasePostgresSchemaGenerator 7 | 8 | from aerich.ddl import BaseDDL 9 | 10 | 11 | class PostgresDDL(BaseDDL): 12 | schema_generator_cls = BasePostgresSchemaGenerator 13 | DIALECT = BasePostgresSchemaGenerator.DIALECT 14 | _ADD_INDEX_TEMPLATE = 'CREATE {unique}INDEX IF NOT EXISTS "{index_name}" ON "{table_name}" {index_type}({column_names}){extra}' 15 | _DROP_INDEX_TEMPLATE = 'DROP INDEX IF EXISTS "{index_name}"' 16 | _ALTER_NULL_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {set_drop} NOT NULL' 17 | _MODIFY_COLUMN_TEMPLATE = ( 18 | 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" TYPE {datatype}{using}' 19 | ) 20 | _SET_COMMENT_TEMPLATE = 'COMMENT ON COLUMN "{table_name}"."{column}" IS {comment}' 21 | _DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT IF EXISTS "{fk_name}"' 22 | 23 | def alter_column_null(self, model: type[Model], field_describe: dict) -> str: 24 | db_table = model._meta.db_table 25 | return self._ALTER_NULL_TEMPLATE.format( 26 | table_name=db_table, 27 | column=field_describe.get("db_column"), 28 | set_drop="DROP" if field_describe.get("nullable") else "SET", 29 | ) 30 | 31 | def modify_column(self, model: type[Model], field_describe: dict, is_pk: bool = False) -> str: 32 | db_table = model._meta.db_table 33 | db_field_types = cast(dict, field_describe.get("db_field_types")) 34 | db_column = field_describe.get("db_column") 35 | datatype = db_field_types.get(self.DIALECT) or db_field_types.get("") 36 | return self._MODIFY_COLUMN_TEMPLATE.format( 37 | table_name=db_table, 38 | column=db_column, 39 | 
datatype=datatype, 40 | using=f' USING "{db_column}"::{datatype}', 41 | ) 42 | 43 | def set_comment(self, model: type[Model], field_describe: dict) -> str: 44 | db_table = model._meta.db_table 45 | return self._SET_COMMENT_TEMPLATE.format( 46 | table_name=db_table, 47 | column=field_describe.get("db_column") or field_describe.get("raw_field"), 48 | comment=( 49 | "{quote}{comment}{quote}".format( 50 | quote="'", comment=self.schema_generator._escape_comment(desc) 51 | ) 52 | if (desc := field_describe.get("description")) 53 | else "NULL" 54 | ), 55 | ) 56 | 57 | def drop_unique_index( 58 | self, 59 | model: type[Model], 60 | field_name: str, 61 | ) -> list[str]: 62 | # If unique=True was added to an existing column by a migration, it exists as a normal unique index 63 | drop_normal_index = self.drop_index(model, [field_name], unique=True) 64 | # Whereas a column created with unique=True from the start gets a unique constraint instead 65 | table_name = self.get_table_name(model) 66 | constraint_name = f"{table_name}_{field_name}_key" 67 | drop_constraint = self.drop_unique_constraint(model, constraint_name) 68 | # To avoid connecting to the db to check whether it is an INDEX or a CONSTRAINT, drop both, 69 | # as the drop index/constraint templates use 'IF EXISTS'. 70 | return [drop_normal_index, drop_constraint] 71 | -------------------------------------------------------------------------------- /aerich/inspectdb/postgres.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import re 4 | from typing import TYPE_CHECKING 5 | 6 | from aerich.inspectdb import Column, FieldMapDict, Inspect 7 | 8 | if TYPE_CHECKING: 9 | from tortoise.backends.base_postgres.client import BasePostgresClient 10 | 11 | 12 | class InspectPostgres(Inspect): 13 | def __init__(self, conn: BasePostgresClient, tables: list[str] | None = None) -> None: 14 | super().__init__(conn, tables) 15 | self.schema = conn.server_settings.get("schema") or "public" 16 | 17 | @property 18 | def field_map(self) -> FieldMapDict: 19 | return { 20 | "int2": self.smallint_field, 21 | "int4": self.int_field, 22 | "int8": self.bigint_field, 23 | "smallint": self.smallint_field, 24 | "bigint": self.bigint_field, 25 | "varchar": self.char_field, 26 | "text": self.text_field, 27 | "timestamptz": self.datetime_field, 28 | "float4": self.float_field, 29 | "float8": self.float_field, 30 | "date": self.date_field, 31 | "time": self.time_field, 32 | "decimal": self.decimal_field, 33 | "numeric": self.decimal_field, 34 | "uuid": self.uuid_field, 35 | "jsonb": self.json_field, 36 | "bytea": self.binary_field, 37 | "bool": self.bool_field, 38 | "timestamp": self.datetime_field, 39 | } 40 | 41 | async def get_all_tables(self) -> list[str]: 42 | sql = "select TABLE_NAME from information_schema.TABLES where table_catalog=$1 and table_schema=$2" 43 | ret = await self.conn.execute_query_dict(sql, [self.database, self.schema]) 44 | return list(map(lambda x: x["table_name"], ret)) 45 | 46 | async def get_columns(self, table: str) -> list[Column]: 47 | columns = [] 48 | sql = f"""select c.column_name, 49 | col_description('public.{table}'::regclass, ordinal_position) as column_comment, 50 | t.constraint_type as column_key, 51 | udt_name as data_type, 52 | is_nullable, 53 | column_default, 54 | character_maximum_length, 55 | numeric_precision, 56 | numeric_scale 57 | from information_schema.constraint_column_usage const 58 | join information_schema.table_constraints t 59 | using (table_catalog, table_schema, table_name, constraint_catalog, 
constraint_schema, constraint_name) 60 | right join information_schema.columns c using (column_name, table_catalog, table_schema, table_name) 61 | where c.table_catalog = $1 62 | and c.table_name = $2 63 | and c.table_schema = $3""" # nosec:B608 64 | if "psycopg" in str(type(self.conn)).lower(): 65 | sql = re.sub(r"\$[123]", "%s", sql) 66 | ret = await self.conn.execute_query_dict(sql, [self.database, table, self.schema]) 67 | for row in ret: 68 | columns.append( 69 | Column( 70 | name=row["column_name"], 71 | data_type=row["data_type"], 72 | null=row["is_nullable"] == "YES", 73 | default=row["column_default"], 74 | length=row["character_maximum_length"], 75 | max_digits=row["numeric_precision"], 76 | decimal_places=row["numeric_scale"], 77 | comment=row["column_comment"], 78 | pk=row["column_key"] == "PRIMARY KEY", 79 | unique=False, # can't get this simply 80 | index=False, # can't get this simply 81 | ) 82 | ) 83 | return columns 84 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "aerich" 3 | dynamic = ["version"] 4 | description = "A database migrations tool for Tortoise ORM." 5 | authors = [{name="long2ice", email="long2ice@gmail.com>"}] 6 | license = {text="Apache-2.0"} 7 | readme = "README.md" 8 | keywords = ["migrate", "Tortoise-ORM", "mysql", "postgresql"] 9 | requires-python = ">=3.9" 10 | dependencies = [ 11 | "tortoise-orm (>=0.21.0,<1.0.0)", 12 | "dictdiffer (>=0.9.0,<1.0.0)", 13 | "asyncclick (>=8.1.7,<9.0.0)", 14 | "anyio (>=3.6.2,<5.0.0)", 15 | ] 16 | classifiers = [ 17 | "License :: OSI Approved :: Apache Software License", 18 | "Development Status :: 3 - Alpha", 19 | "Intended Audience :: Developers", 20 | "Programming Language :: Python :: 3", 21 | "Programming Language :: Python :: 3.9", 22 | "Programming Language :: Python :: 3.10", 23 | "Programming Language :: Python :: 3.11", 24 | "Programming Language :: Python :: 3.12", 25 | "Programming Language :: Python :: 3.13", 26 | "Programming Language :: Python :: 3.14", 27 | "Programming Language :: Python :: Implementation :: CPython", 28 | "Programming Language :: PL/SQL", 29 | "Framework :: AsyncIO", 30 | "Topic :: Software Development :: Libraries :: Python Modules", 31 | "Topic :: Database", 32 | "Typing :: Typed", 33 | "Operating System :: POSIX", 34 | "Operating System :: MacOS :: MacOS X", 35 | "Operating System :: Microsoft :: Windows", 36 | ] 37 | 38 | [project.optional-dependencies] 39 | toml = [ 40 | "tomli-w (>=1.1.0,<2.0.0); python_version >= '3.11'", 41 | "tomlkit (>=0.11.4,<1.0.0); python_version < '3.11'", 42 | ] 43 | # Need asyncpg or psyncopg for PostgreSQL 44 | asyncpg = ["asyncpg"] 45 | psycopg = ["psycopg[pool,binary] (>=3.0.12,<4.0.0)"] 46 | # Need asyncmy or aiomysql for MySQL 47 | asyncmy = ["asyncmy>=0.2.9"] 48 | mysql = ["aiomysql>=0.2.0"] 49 | 50 | [project.urls] 51 | homepage = "https://github.com/tortoise/aerich" 52 | repository = "https://github.com/tortoise/aerich.git" 53 | documentation = "https://github.com/tortoise/aerich" 54 | 55 | [project.scripts] 56 | aerich = "aerich.cli:main" 57 | 58 | [dependency-groups] 59 | dev = [ 60 | "ruff >=0.9.0", 61 | "bandit >=1.7.0", 62 | "mypy >=1.10.0", 63 | "twine >=6.1.0", 64 | ] 65 | test = [ 66 | "pytest >=8.3.0", 67 | "pytest-mock >=3.14.0", 68 | "pytest-cov>=7.0.0", 69 | "async-timeout >=5.0.1; python_version <'3.11'", 70 | "pydantic-settings >=2.10.1", 71 | # required for sha256_password by 
asyncmy 72 | "cryptography; python_version >='3.9.0,!=3.9.1'", 73 | ] 74 | vector = [ 75 | "tortoise-vector >=0.2.0; python_version >='3.10'", 76 | ] 77 | 78 | [tool.aerich] 79 | tortoise_orm = "conftest.tortoise_orm" 80 | location = "./migrations" 81 | src_folder = "./." 82 | 83 | [build-system] 84 | requires = ["pdm-backend"] 85 | build-backend = "pdm.backend" 86 | 87 | [tool.pdm] 88 | version = {source="file", path="aerich/version.py"} 89 | 90 | [tool.pdm.build] 91 | excludes = ["./**/.git", "./**/.*_cache"] 92 | include = ["CHANGELOG.md", "LICENSE", "README.md"] 93 | 94 | [tool.pytest.ini_options] 95 | anyio_mode = "auto" 96 | 97 | [tool.coverage.run] 98 | branch = true 99 | source = ["aerich"] 100 | 101 | [tool.coverage.report] 102 | exclude_also = [ 103 | "if TYPE_CHECKING:" 104 | ] 105 | 106 | [tool.mypy] 107 | pretty = true 108 | check_untyped_defs = true 109 | warn_unused_ignores = true 110 | disallow_incomplete_defs = false 111 | exclude = ["tests/assets", "migrations"] 112 | 113 | [[tool.mypy.overrides]] 114 | module = [ 115 | 'dictdiffer.*', 116 | 'orjson', 117 | 'tomlkit', 118 | 'tomli_w', 119 | 'tomli', 120 | ] 121 | ignore_missing_imports = true 122 | 123 | [tool.ruff] 124 | line-length = 100 125 | 126 | [tool.ruff.lint] 127 | extend-select = [ 128 | "I", # https://docs.astral.sh/ruff/rules/#isort-i 129 | "SIM", # https://docs.astral.sh/ruff/rules/#flake8-simplify-sim 130 | "FA", # https://docs.astral.sh/ruff/rules/#flake8-future-annotations-fa 131 | "UP", # https://docs.astral.sh/ruff/rules/#pyupgrade-up 132 | "RUF100", # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf 133 | ] 134 | ignore = ["UP031"] # https://docs.astral.sh/ruff/rules/printf-string-formatting/ 135 | 136 | [tool.ruff.lint.isort] 137 | extra-standard-library = ["tomllib"] 138 | 139 | [tool.bandit] 140 | exclude_dirs = ["tests", "conftest.py"] 141 | -------------------------------------------------------------------------------- /tests/old_models.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from enum import IntEnum 3 | 4 | from tortoise import Model, fields 5 | from tortoise.indexes import Index 6 | 7 | from tests.indexes import CustomIndex 8 | 9 | 10 | class ProductType(IntEnum): 11 | article = 1 12 | page = 2 13 | 14 | 15 | class PermissionAction(IntEnum): 16 | create = 1 17 | delete = 2 18 | update = 3 19 | read = 4 20 | 21 | 22 | class Status(IntEnum): 23 | on = 1 24 | off = 0 25 | 26 | 27 | class User(Model): 28 | username = fields.CharField(max_length=20) 29 | password = fields.CharField(max_length=200) 30 | last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now) 31 | is_active = fields.BooleanField(default=True, description="Is Active") 32 | is_superuser = fields.BooleanField(default=False, description="Is SuperUser") 33 | avatar = fields.CharField(max_length=200, default="") 34 | intro = fields.TextField(default="") 35 | longitude = fields.DecimalField(max_digits=12, decimal_places=9) 36 | 37 | class Meta: 38 | indexes = [Index(fields=("username", "is_active")), CustomIndex(fields=("is_superuser",))] 39 | 40 | 41 | class Email(Model): 42 | email = fields.CharField(max_length=200) 43 | company = fields.CharField(max_length=100, db_index=True) 44 | is_primary = fields.BooleanField(default=False) 45 | user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField( 46 | "models.User", db_constraint=False 47 | ) 48 | 49 | 50 | class Category(Model): 51 | slug = fields.CharField(max_length=200) 52 
| name = fields.CharField(max_length=200) 53 | user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField( 54 | "models.User", description="User" 55 | ) 56 | title = fields.CharField(max_length=20, unique=True) 57 | created_at = fields.DatetimeField(auto_now_add=True) 58 | 59 | class Meta: 60 | indexes = [Index(fields=("slug",))] 61 | 62 | 63 | class Product(Model): 64 | categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField("models.Category") 65 | uid = fields.IntField(source_field="uuid", unique=True) 66 | name = fields.CharField(max_length=50) 67 | view_num = fields.IntField(description="View Num") 68 | sort = fields.IntField() 69 | is_review = fields.BooleanField(description="Is Reviewed") 70 | type: int = fields.IntEnumField( 71 | ProductType, description="Product Type", source_field="type_db_alias" 72 | ) 73 | image = fields.CharField(max_length=200) 74 | body = fields.TextField() 75 | created_at = fields.DatetimeField(auto_now_add=True) 76 | is_delete = fields.BooleanField(default=False) 77 | 78 | 79 | class Config(Model): 80 | slug = fields.CharField(primary_key=True, max_length=10) 81 | category: fields.ManyToManyRelation[Category] = fields.ManyToManyField("models.Category") 82 | categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField( 83 | "models.Category", through="config_category_map", related_name="config_set" 84 | ) 85 | name = fields.CharField(max_length=100, unique=True) 86 | label = fields.CharField(max_length=200) 87 | key = fields.CharField(max_length=20) 88 | value: dict = fields.JSONField() 89 | status: Status = fields.IntEnumField(Status, default=Status.on) 90 | 91 | class Meta: 92 | table = "configs" 93 | 94 | 95 | class DontManageMe(Model): 96 | name = fields.CharField(max_length=50) 97 | 98 | class Meta: 99 | table = "dont_manage" 100 | 101 | 102 | class Ignore(Model): 103 | name = fields.CharField(max_length=50) 104 | 105 | class Meta: 106 | managed = True 107 | 108 | 109 | def main() -> None: 110 | """Generate a python file for the old_models_describe""" 111 | from pathlib import Path 112 | 113 | from tortoise import run_async 114 | from tortoise.contrib.test import init_memory_sqlite 115 | 116 | from aerich.utils import get_models_describe 117 | 118 | @init_memory_sqlite 119 | async def run() -> None: 120 | old_models_describe = get_models_describe("models") 121 | p = Path("old_models_describe.py") 122 | p.write_text(f"{old_models_describe = }", encoding="utf-8") 123 | print(f"Write value to {p}\nYou can reformat it by `ruff format {p}`") 124 | 125 | run_async(run()) 126 | 127 | 128 | if __name__ == "__main__": 129 | main() 130 | -------------------------------------------------------------------------------- /tests/models.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import datetime 4 | import uuid 5 | from enum import IntEnum 6 | 7 | from tortoise import Model, fields 8 | from tortoise.contrib.mysql.indexes import FullTextIndex 9 | from tortoise.contrib.postgres.indexes import HashIndex 10 | from tortoise.fields import OnDelete 11 | from tortoise.indexes import Index 12 | 13 | from tests._utils import Dialect 14 | from tests.indexes import CustomIndex 15 | 16 | 17 | class ProductType(IntEnum): 18 | article = 1 19 | page = 2 20 | 21 | 22 | class PermissionAction(IntEnum): 23 | create = 1 24 | delete = 2 25 | update = 3 26 | read = 4 27 | 28 | 29 | class Status(IntEnum): 30 | on = 1 31 | off = 0 32 | 33 | 34 | class User(Model): 35 | 
username = fields.CharField(max_length=20, unique=True) 36 | password = fields.CharField(max_length=100) 37 | last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now) 38 | is_active = fields.BooleanField(default=True, description="Is Active") 39 | is_superuser = fields.BooleanField(default=False, description="Is SuperUser") 40 | intro = fields.TextField(default="") 41 | longitude = fields.DecimalField(max_digits=10, decimal_places=8) 42 | 43 | products: fields.ManyToManyRelation[Product] 44 | 45 | class Meta: 46 | # reverse indexes elements 47 | indexes = [CustomIndex(fields=("is_superuser",)), Index(fields=("username", "is_active"))] 48 | 49 | 50 | class Email(Model): 51 | email_id = fields.IntField(primary_key=True) 52 | email = fields.CharField(max_length=200, db_index=True) 53 | company = fields.CharField(max_length=100, db_index=True, unique=True) 54 | is_primary = fields.BooleanField(default=False) 55 | address = fields.CharField(max_length=200) 56 | users: fields.ManyToManyRelation[User] = fields.ManyToManyField("models.User") 57 | config: fields.OneToOneRelation[Config] = fields.OneToOneField("models.Config") 58 | 59 | 60 | def default_name(): 61 | return uuid.uuid4() 62 | 63 | 64 | class Category(Model): 65 | slug = fields.CharField(max_length=100) 66 | name = fields.CharField(max_length=200, null=True, default=default_name) 67 | owner: fields.ForeignKeyRelation[User] = fields.ForeignKeyField( 68 | "models.User", description="User" 69 | ) 70 | title = fields.CharField(max_length=20, unique=False) 71 | created_at = fields.DatetimeField(auto_now_add=True) 72 | 73 | class Meta: 74 | if Dialect.is_postgres(): 75 | indexes = [HashIndex(fields=("slug",))] 76 | elif Dialect.is_mysql(): 77 | indexes = [FullTextIndex(fields=("slug",))] # type:ignore 78 | else: 79 | indexes = [Index(fields=("slug",))] # type:ignore 80 | 81 | 82 | class Product(Model): 83 | id = fields.BigIntField(primary_key=True) 84 | categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField( 85 | "models.Category", null=False, on_delete=OnDelete.NO_ACTION 86 | ) 87 | users: fields.ManyToManyRelation[User] = fields.ManyToManyField( 88 | "models.User", related_name="products" 89 | ) 90 | name = fields.CharField(max_length=50) 91 | view_num = fields.IntField(description="View Num", default=0) 92 | sort = fields.IntField() 93 | is_reviewed = fields.BooleanField(description="Is Reviewed") 94 | type: int = fields.IntEnumField( 95 | ProductType, description="Product Type", source_field="type_db_alias" 96 | ) 97 | pic = fields.CharField(max_length=200) 98 | body = fields.TextField() 99 | price = fields.FloatField(null=True) 100 | no = fields.UUIDField(db_index=True) 101 | created_at = fields.DatetimeField(auto_now_add=True) 102 | is_deleted = fields.BooleanField(default=False) 103 | 104 | class Meta: 105 | unique_together = (("name", "type"),) 106 | indexes = (("name", "type"),) 107 | managed = True 108 | 109 | 110 | class Config(Model): 111 | slug = fields.CharField(primary_key=True, max_length=20) 112 | categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField( 113 | "models.Category", through="config_category_map", related_name="category_set" 114 | ) 115 | name = fields.CharField(max_length=100, db_index=True) 116 | label = fields.CharField(max_length=200) 117 | key = fields.CharField(max_length=20) 118 | value: dict = fields.JSONField() 119 | status: Status = fields.IntEnumField(Status) 120 | user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField( 121 | 
"models.User", description="User" 122 | ) 123 | 124 | email: fields.OneToOneRelation[Email] 125 | 126 | class Meta: 127 | managed = True 128 | 129 | 130 | class DontManageMe(Model): 131 | name = fields.CharField(max_length=50) 132 | 133 | class Meta: 134 | managed = False 135 | 136 | 137 | class Ignore(Model): 138 | class Meta: 139 | managed = False 140 | 141 | 142 | class NewModel(Model): 143 | name = fields.CharField(max_length=50) 144 | -------------------------------------------------------------------------------- /tests/test_fake.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import re 5 | from pathlib import Path 6 | 7 | from tests._utils import prepare_py_files, run_shell, skip_dialect, tmp_daily_db 8 | 9 | 10 | def _append_field(*files: str, name="field_1") -> None: 11 | for file in files: 12 | p = Path(file) 13 | field = f" {name} = fields.IntField(default=0)" 14 | with p.open("a") as f: 15 | f.write(os.linesep + field) 16 | 17 | 18 | # TODO: remove skip decorator to test sqlite if alter-column supported 19 | @skip_dialect("sqlite") 20 | def test_fake(tmp_work_dir): 21 | prepare_py_files("fake", with_testing_models=True) 22 | with tmp_daily_db(): 23 | _test_fake() 24 | 25 | 26 | def _test_fake(): 27 | output = run_shell("aerich init -t settings.TORTOISE_ORM") 28 | assert "Success" in output 29 | output = run_shell("aerich init-db") 30 | assert "Success" in output 31 | output = run_shell("aerich --app models_second init-db") 32 | assert "Success" in output 33 | output = run_shell("pytest _tests.py::test_init_db") 34 | assert "error" not in output.lower() 35 | _append_field("models.py", "models_second.py") 36 | output = run_shell("aerich migrate") 37 | assert "Success" in output 38 | output = run_shell("aerich --app models_second migrate") 39 | assert "Success" in output 40 | output = run_shell("aerich upgrade --fake") 41 | assert "FAKED" in output 42 | output = run_shell("aerich --app models_second upgrade --fake") 43 | assert "FAKED" in output 44 | output = run_shell("pytest _tests.py::test_fake_field_1") 45 | assert "error" not in output.lower() 46 | _append_field("models.py", "models_second.py", name="field_2") 47 | output = run_shell("aerich migrate") 48 | assert "Success" in output 49 | output = run_shell("aerich --app models_second migrate") 50 | assert "Success" in output 51 | output = run_shell("aerich heads") 52 | assert "_update.py" in output 53 | output = run_shell("aerich upgrade --fake") 54 | assert "FAKED" in output 55 | output = run_shell("aerich --app models_second upgrade --fake") 56 | assert "FAKED" in output 57 | output = run_shell("pytest _tests.py::test_fake_field_2") 58 | assert "error" not in output.lower() 59 | output = run_shell("aerich heads") 60 | assert "No available heads." in output 61 | output = run_shell("aerich --app models_second heads") 62 | assert "No available heads." 
in output 63 | _append_field("models.py", "models_second.py", name="field_3") 64 | run_shell("aerich migrate", capture_output=False) 65 | run_shell("aerich --app models_second migrate", capture_output=False) 66 | run_shell("aerich upgrade --fake", capture_output=False) 67 | run_shell("aerich --app models_second upgrade --fake", capture_output=False) 68 | output = run_shell("aerich downgrade --fake -v 2 --yes", input="y\n") 69 | assert "FAKED" in output 70 | output = run_shell("aerich --app models_second downgrade --fake -v 2 --yes", input="y\n") 71 | assert "FAKED" in output 72 | output = run_shell("aerich heads") 73 | assert "No available heads." not in output 74 | assert not re.search(r"1_\d+_update\.py", output) 75 | assert re.search(r"2_\d+_update\.py", output) 76 | output = run_shell("aerich --app models_second heads") 77 | assert "No available heads." not in output 78 | assert not re.search(r"1_\d+_update\.py", output) 79 | assert re.search(r"2_\d+_update\.py", output) 80 | output = run_shell("aerich downgrade --fake -v 1 --yes", input="y\n") 81 | assert "FAKED" in output 82 | output = run_shell("aerich --app models_second downgrade --fake -v 1 --yes", input="y\n") 83 | assert "FAKED" in output 84 | output = run_shell("aerich heads") 85 | assert "No available heads." not in output 86 | assert re.search(r"1_\d+_update\.py", output) 87 | assert re.search(r"2_\d+_update\.py", output) 88 | output = run_shell("aerich --app models_second heads") 89 | assert "No available heads." not in output 90 | assert re.search(r"1_\d+_update\.py", output) 91 | assert re.search(r"2_\d+_update\.py", output) 92 | output = run_shell("aerich upgrade --fake") 93 | assert "FAKED" in output 94 | output = run_shell("aerich --app models_second upgrade --fake") 95 | assert "FAKED" in output 96 | output = run_shell("aerich heads") 97 | assert "No available heads." in output 98 | output = run_shell("aerich --app models_second heads") 99 | assert "No available heads." in output 100 | output = run_shell("aerich downgrade --fake -v 1 --yes", input="y\n") 101 | assert "FAKED" in output 102 | output = run_shell("aerich --app models_second downgrade --fake -v 1 --yes", input="y\n") 103 | assert "FAKED" in output 104 | output = run_shell("aerich heads") 105 | assert "No available heads." not in output 106 | assert re.search(r"1_\d+_update\.py", output) 107 | assert re.search(r"2_\d+_update\.py", output) 108 | output = run_shell("aerich --app models_second heads") 109 | assert "No available heads." 
not in output 110 | assert re.search(r"1_\d+_update\.py", output) 111 | assert re.search(r"2_\d+_update\.py", output) 112 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | on: 3 | push: 4 | branches-ignore: 5 | - dev 6 | paths-ignore: 7 | - "*.md" 8 | pull_request: 9 | branches: 10 | - dev 11 | paths-ignore: 12 | - "*.md" 13 | jobs: 14 | ci: 15 | runs-on: ubuntu-latest 16 | services: 17 | postgres: 18 | image: postgres 19 | ports: 20 | - 5432:5432 21 | env: 22 | POSTGRES_PASSWORD: 123456 23 | POSTGRES_USER: postgres 24 | options: --health-cmd=pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 25 | strategy: 26 | matrix: 27 | python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] 28 | tortoise-orm: 29 | - tortoise021 30 | - tortoise022 31 | - tortoise023 32 | - tortoise024 33 | - tortoise025 34 | - tortoisedev 35 | steps: 36 | - name: Start MySQL 37 | run: sudo systemctl start mysql.service 38 | - uses: actions/checkout@v5 39 | - uses: astral-sh/setup-uv@v7 40 | with: 41 | enable-cache: true 42 | activate-environment: true 43 | python-version: ${{ matrix.python-version }} 44 | - name: Install dependencies 45 | run: make deps options='--no-extra=psycopg' 46 | - name: Install TortoiseORM v0.21 47 | if: matrix.tortoise-orm == 'tortoise021' 48 | run: uv pip install --upgrade "tortoise-orm>=0.21,<0.22" 49 | - name: Install TortoiseORM v0.22 50 | if: matrix.tortoise-orm == 'tortoise022' 51 | run: uv pip install --upgrade "tortoise-orm>=0.22,<0.23" 52 | - name: Install TortoiseORM v0.23 53 | if: matrix.tortoise-orm == 'tortoise023' 54 | run: uv pip install --upgrade "tortoise-orm>=0.23,<0.24" 55 | - name: Install TortoiseORM v0.24 56 | if: matrix.tortoise-orm == 'tortoise024' 57 | run: uv pip install --upgrade "tortoise-orm>=0.24,<0.25" 58 | - name: Install TortoiseORM v0.25 59 | if: matrix.tortoise-orm == 'tortoise025' 60 | run: uv pip install --upgrade "tortoise-orm>=0.25,<0.26" 61 | - name: Install TortoiseORM develop branch 62 | if: matrix.tortoise-orm == 'tortoisedev' 63 | run: | 64 | uv pip uninstall tortoise-orm 65 | uv pip install --upgrade "git+https://github.com/tortoise/tortoise-orm" 66 | - name: Build sdist 67 | run: make _build 68 | - name: Check style 69 | run: make _check 70 | - name: Run tests 71 | env: 72 | MYSQL_PASS: root 73 | MYSQL_HOST: 127.0.0.1 74 | MYSQL_PORT: 3306 75 | POSTGRES_PASS: 123456 76 | POSTGRES_HOST: 127.0.0.1 77 | POSTGRES_PORT: 5432 78 | run: make _testall 79 | - name: Show test coverage 80 | run: make report 81 | 82 | asyncmySupport: 83 | runs-on: ubuntu-latest 84 | steps: 85 | - name: Start MySQL 86 | run: sudo systemctl start mysql.service 87 | - uses: actions/checkout@v5 88 | - uses: astral-sh/setup-uv@v7 89 | with: 90 | enable-cache: true 91 | python-version: '3.13' 92 | activate-environment: true 93 | - name: Install dependencies 94 | run: uv sync --group test --extra toml --extra asyncmy 95 | - name: Test MySQL with asyncmy 96 | run: make test_mysql 97 | env: 98 | MYSQL_PASS: root 99 | MYSQL_HOST: 127.0.0.1 100 | MYSQL_PORT: 3306 101 | # Verify `poetry add aerich` work 102 | AERICH_TEST_POETRY_ADD: 1 103 | - name: Show test coverage 104 | run: make report 105 | 106 | postgresVector: 107 | runs-on: ubuntu-latest 108 | services: 109 | postgres: 110 | image: ankane/pgvector:latest 111 | ports: 112 | - 5432:5432 113 | env: 114 | POSTGRES_PASSWORD: 123456 115 | POSTGRES_USER: 
postgres 116 | options: --health-cmd=pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 117 | steps: 118 | - uses: actions/checkout@v5 119 | - uses: astral-sh/setup-uv@v7 120 | with: 121 | enable-cache: true 122 | python-version: '3.13' 123 | activate-environment: true 124 | - name: Install dependencies 125 | run: uv pip install --group vector --group test -e ".[toml,mysql]" 126 | - name: Test tortoise vector 127 | run: make test_postgres_vector 128 | env: 129 | POSTGRES_PASS: 123456 130 | POSTGRES_HOST: 127.0.0.1 131 | POSTGRES_PORT: 5432 132 | - name: Show test coverage 133 | run: make report 134 | 135 | psycopgSupport: 136 | runs-on: ${{ matrix.os }} 137 | strategy: 138 | matrix: 139 | os: [ubuntu-latest, windows-latest] 140 | steps: 141 | - uses: actions/checkout@v5 142 | - uses: actions/setup-python@v6 143 | with: 144 | python-version: '3.13' 145 | - uses: ikalnytskyi/action-setup-postgres@v7 146 | with: 147 | username: postgres 148 | password: 123456 149 | - uses: astral-sh/setup-uv@v7 150 | with: 151 | enable-cache: true 152 | - name: Install dependencies 153 | run: | 154 | uv pip install --system --group test -e ".[toml,psycopg,mysql]" 155 | - name: Test psycopg 156 | run: make test_psycopg 157 | env: 158 | POSTGRES_HOST: 127.0.0.1 159 | POSTGRES_PASS: 123456 160 | POSTGRES_PORT: 5432 161 | - name: Show test coverage 162 | run: make report 163 | -------------------------------------------------------------------------------- /aerich/_compat.py: -------------------------------------------------------------------------------- 1 | # mypy: disable-error-code="no-redef" 2 | from __future__ import annotations 3 | 4 | import platform 5 | import re 6 | import sys 7 | from types import ModuleType 8 | from typing import TYPE_CHECKING, cast 9 | 10 | import tortoise 11 | 12 | if sys.version_info >= (3, 11): 13 | import tomllib 14 | from typing import Self 15 | else: 16 | from typing_extensions import Self 17 | 18 | try: 19 | import tomli as tomllib 20 | except ImportError: 21 | import tomlkit as tomllib 22 | 23 | if TYPE_CHECKING: 24 | from tortoise import Model 25 | from tortoise.fields.relational import ManyToManyFieldInstance 26 | 27 | 28 | __all__ = ("Self", "tomllib", "imports_tomlkit", "tortoise_version_less_than") 29 | 30 | 31 | def imports_tomlkit() -> ModuleType: 32 | try: 33 | import tomli_w as tomlkit 34 | except ImportError: 35 | import tomlkit 36 | return tomlkit 37 | 38 | 39 | def tortoise_version_less_than(version: str) -> bool: 40 | # The min version of tortoise is '0.11.0', so we can compare it by a `<`, 41 | return tortoise.__version__ < version 42 | 43 | 44 | def _init_asyncio_patch() -> None: 45 | """ 46 | Select compatible event loop for psycopg3. 47 | 48 | As of Python 3.8+, the default event loop on Windows is `proactor`, 49 | however psycopg3 requires the old default "selector" event loop. 50 | See https://www.psycopg.org/psycopg3/docs/advanced/async.html 51 | """ 52 | if platform.system() == "Windows": 53 | try: 54 | from asyncio import WindowsSelectorEventLoopPolicy # type:ignore 55 | except ImportError: 56 | pass # Can't assign a policy which doesn't exist. 
57 | else: 58 | from asyncio import get_event_loop_policy, set_event_loop_policy 59 | 60 | if not isinstance(get_event_loop_policy(), WindowsSelectorEventLoopPolicy): 61 | set_event_loop_policy(WindowsSelectorEventLoopPolicy()) 62 | 63 | 64 | def _init_tortoise_0_24_1_patch() -> None: 65 | # this patch is for "tortoise-orm==0.24.1" to fix: 66 | # https://github.com/tortoise/tortoise-orm/issues/1893 67 | if tortoise.__version__ != "0.24.1": 68 | return 69 | from tortoise.backends.base.schema_generator import BaseSchemaGenerator 70 | 71 | target_func = "_get_m2m_tables" 72 | 73 | def _get_m2m_tables( # Copied from tortoise-orm 0.25 74 | self: BaseSchemaGenerator, 75 | model: type[Model], 76 | db_table: str, 77 | safe: bool, 78 | models_tables: list[str], 79 | ) -> list[str]: 80 | m2m_tables_for_create = [] 81 | for m2m_field in model._meta.m2m_fields: 82 | field_object = cast("ManyToManyFieldInstance", model._meta.fields_map[m2m_field]) 83 | if field_object._generated or field_object.through in models_tables: 84 | continue 85 | backward_key, forward_key = field_object.backward_key, field_object.forward_key 86 | if field_object.db_constraint: 87 | backward_fk = self._create_fk_string( 88 | "", 89 | backward_key, 90 | db_table, 91 | model._meta.db_pk_column, 92 | field_object.on_delete, 93 | "", 94 | ) 95 | forward_fk = self._create_fk_string( 96 | "", 97 | forward_key, 98 | field_object.related_model._meta.db_table, 99 | field_object.related_model._meta.db_pk_column, 100 | field_object.on_delete, 101 | "", 102 | ) 103 | else: 104 | backward_fk = forward_fk = "" 105 | exists = "IF NOT EXISTS " if safe else "" 106 | through_table_name = field_object.through 107 | backward_type = forward_type = comment = "" 108 | if func := getattr(self, "_get_pk_field_sql_type", None): 109 | backward_type = func(model._meta.pk) 110 | forward_type = func(field_object.related_model._meta.pk) 111 | if desc := field_object.description: 112 | comment = self._table_comment_generator(table=through_table_name, comment=desc) 113 | m2m_create_string = self.M2M_TABLE_TEMPLATE.format( 114 | exists=exists, 115 | table_name=through_table_name, 116 | backward_fk=backward_fk, 117 | forward_fk=forward_fk, 118 | backward_key=backward_key, 119 | backward_type=backward_type, 120 | forward_key=forward_key, 121 | forward_type=forward_type, 122 | extra=self._table_generate_extra(table=field_object.through), 123 | comment=comment, 124 | ) 125 | if not field_object.db_constraint: 126 | m2m_create_string = m2m_create_string.replace( 127 | """, 128 | , 129 | """, 130 | "", 131 | ) # may have better way 132 | m2m_create_string += self._post_table_hook() 133 | if getattr(field_object, "create_unique_index", field_object.unique): 134 | unique_index_create_sql = self._get_unique_index_sql( 135 | exists, through_table_name, [backward_key, forward_key] 136 | ) 137 | if unique_index_create_sql.endswith(";"): 138 | m2m_create_string += "\n" + unique_index_create_sql 139 | else: 140 | lines = m2m_create_string.splitlines() 141 | lines[-2] += "," 142 | indent = m.group() if (m := re.match(r"\s+", lines[-2])) else "" 143 | lines.insert(-1, indent + unique_index_create_sql) 144 | m2m_create_string = "\n".join(lines) 145 | m2m_tables_for_create.append(m2m_create_string) 146 | return m2m_tables_for_create 147 | 148 | setattr(BaseSchemaGenerator, target_func, _get_m2m_tables) 149 | -------------------------------------------------------------------------------- /tests/test_cli.py: 
-------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import time 5 | from collections.abc import Generator 6 | from pathlib import Path 7 | 8 | import pytest 9 | 10 | from aerich._compat import tomllib 11 | from aerich.cli import inspectdb, upgrade 12 | from tests._utils import chdir, prepare_py_files, requires_dialect, run_shell 13 | 14 | 15 | @pytest.fixture 16 | def new_project(tmp_work_dir: Path) -> Generator[Path]: 17 | prepare_py_files("migrate_no_input") 18 | run_shell("aerich init -t settings.TORTOISE_ORM", capture_output=False) 19 | run_shell("aerich init-db", capture_output=False) 20 | yield tmp_work_dir 21 | 22 | 23 | def test_empty_migrate_with_no_input(new_project: Path) -> None: 24 | output = run_shell("aerich migrate", cwd=new_project) 25 | assert "No changes detected" in output 26 | output = run_shell("aerich migrate --empty", cwd=new_project) 27 | assert "Success" in output 28 | migrate_dir = Path("migrations/models") 29 | empty_migration_files = list(migrate_dir.glob("1_*.py")) 30 | assert len(empty_migration_files) == 1 31 | time.sleep(1) # ensure new migration filename generated. 32 | run_shell("aerich migrate --empty --no-input", cwd=new_project) 33 | new_empty_migration_files = list(migrate_dir.glob("1_*.py")) 34 | assert len(new_empty_migration_files) == 1 35 | assert empty_migration_files != new_empty_migration_files 36 | 37 | 38 | @pytest.fixture 39 | async def project_with_unapplied_migrations(new_project: Path) -> None: 40 | models_py = Path("models.py") 41 | text = models_py.read_text() 42 | if "age" not in text: 43 | models_py.write_text(text + " age=fields.IntField()\n") 44 | run_shell("aerich migrate", cwd=new_project) 45 | 46 | 47 | def test_migrate_with_same_version_file_exists(project_with_unapplied_migrations) -> None: 48 | # CliRunner change the entire interpreter state, so run it in subprocess 49 | output = run_shell("pytest _tests.py") 50 | assert "1 passed" in output 51 | 52 | 53 | @requires_dialect("sqlite") 54 | @pytest.mark.usefixtures("tmp_work_dir") 55 | def test_auto_add_aerich_models() -> None: 56 | prepare_py_files("missing_aerich_models") 57 | output = run_shell("aerich init -t settings.TORTOISE_ORM_NO_AERICH_MODELS") 58 | assert "Success writing aerich config to pyproject.toml" in output 59 | output = run_shell("aerich init-db") 60 | db = "db.sqlite3" 61 | assert f'Success writing schemas to database "{db}"' in output 62 | with open("models.py", "a+") as f: 63 | f.write(" b = fields.IntField(null=True)\n") 64 | output = run_shell("aerich migrate") 65 | assert "Success" in output 66 | output = run_shell("aerich upgrade") 67 | assert "Success" in output 68 | 69 | 70 | @requires_dialect("sqlite") 71 | @pytest.mark.usefixtures("tmp_work_dir") 72 | def test_missing_aerich_models() -> None: 73 | prepare_py_files("missing_aerich_models") 74 | output = run_shell("aerich init -t settings.TORTOISE_ORM_MULTI_APPS_WITHOUT_AERICH_MODELS") 75 | assert "Success writing aerich config to pyproject.toml" in output 76 | output = run_shell("aerich init-db") 77 | assert "You have to add 'aerich.models' in the models of your tortoise config" in output 78 | 79 | output = run_shell("aerich init -t settings.TORTOISE_ORM_MULTI_APPS") 80 | assert "Success writing aerich config to pyproject.toml" in output 81 | output = run_shell("aerich migrate") 82 | assert "need to run `aerich init-db` first" in output 83 | output = run_shell("aerich upgrade") 84 | assert "need to run `aerich 
init-db` first" in output 85 | 86 | output = run_shell("aerich init-db") 87 | assert "Success" in output 88 | output = run_shell("aerich init -t settings.TORTOISE_ORM_MULTI_APPS_WITHOUT_AERICH_MODELS") 89 | assert "Success writing aerich config to pyproject.toml" in output 90 | output = run_shell("aerich migrate") 91 | assert "You have to add 'aerich.models' in the models of your tortoise config" in output 92 | output = run_shell("aerich upgrade") 93 | assert "You have to add 'aerich.models' in the models of your tortoise config" in output 94 | 95 | output = run_shell("aerich init -t settings.TORTOISE_ORM_MULTI_APPS") 96 | assert "Success" in output 97 | with open("models.py", "a+") as f: 98 | f.write(" b = fields.IntField(null=True)\n") 99 | output = run_shell("aerich migrate") 100 | assert "Success" in output 101 | output = run_shell("aerich upgrade") 102 | assert "Success" in output 103 | 104 | 105 | @pytest.mark.usefixtures("tmp_work_dir") 106 | def test_aerich_init() -> None: 107 | prepare_py_files("missing_aerich_models") 108 | toml_file = Path("pyproject.toml") 109 | # init without pyproject.toml 110 | output = run_shell("aerich init -t settings.TORTOISE_ORM") 111 | assert toml_file.exists() 112 | assert f"Success writing aerich config to {toml_file}" in output 113 | doc: dict = tomllib.loads(toml_file.read_text("utf-8")) 114 | assert doc["tool"]["aerich"]["tortoise_orm"] == "settings.TORTOISE_ORM" 115 | modified_at = toml_file.stat().st_mtime 116 | # init again does not changed the modify time of config file 117 | output = run_shell("aerich init -t settings.TORTOISE_ORM") 118 | assert f"Aerich config {toml_file} already inited." in output 119 | assert modified_at == toml_file.stat().st_mtime 120 | # modify without comment line in config file 121 | output = run_shell("aerich init -t settings.TORTOISE_ORM_NO_AERICH_MODELS") 122 | assert f"Success writing aerich config to {toml_file}" in output 123 | doc = tomllib.loads(toml_file.read_text("utf-8")) 124 | assert doc["tool"]["aerich"]["tortoise_orm"] == "settings.TORTOISE_ORM_NO_AERICH_MODELS" 125 | # init will not remove comment line in config file 126 | comment_line = "# This is a comment line." 
127 | with toml_file.open("a", encoding="utf-8") as f: 128 | f.writelines([os.linesep, comment_line + os.linesep]) 129 | output = run_shell("aerich init -t settings.TORTOISE_ORM") 130 | assert f"Success writing aerich config to {toml_file}" in output 131 | text = toml_file.read_text("utf-8") 132 | assert comment_line in text 133 | doc = tomllib.loads(text) 134 | assert doc["tool"]["aerich"]["tortoise_orm"] == "settings.TORTOISE_ORM" 135 | # In line comment will not remove either 136 | content = os.linesep.join([comment_line, "[tool.mypy]", "pretty=true # comment-2"]) 137 | toml_file.write_text(content, encoding="utf-8") 138 | output = run_shell("aerich init -t settings.TORTOISE_ORM") 139 | assert f"Success writing aerich config to {toml_file}" in output 140 | text = toml_file.read_text("utf-8") 141 | assert comment_line in text 142 | assert "comment-2" in text 143 | doc = tomllib.loads(text) 144 | assert doc["tool"]["aerich"]["tortoise_orm"] == "settings.TORTOISE_ORM" 145 | 146 | 147 | def test_help(tmp_path): 148 | output = run_shell("aerich --help") 149 | assert output == run_shell("aerich -h") 150 | assert str(upgrade.help) in output 151 | assert "--fake" not in output 152 | output = run_shell("aerich upgrade --help") 153 | assert output == run_shell("aerich upgrade -h") 154 | assert str(upgrade.help) in output 155 | assert "--fake" in output 156 | with chdir(tmp_path): 157 | output = run_shell(f"aerich {inspectdb.name} --help") 158 | assert output == run_shell(f"aerich {inspectdb.name} -h") 159 | assert str(inspectdb.help) in output 160 | assert "--table" in output 161 | -------------------------------------------------------------------------------- /tests/_utils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import contextlib 4 | import functools 5 | import os 6 | import platform 7 | import shlex 8 | import shutil 9 | import subprocess 10 | import sys 11 | from collections.abc import Generator 12 | from pathlib import Path 13 | from typing import Callable, Literal 14 | 15 | import pytest 16 | from tortoise import Tortoise, generate_schema_for_client 17 | from tortoise.exceptions import DBConnectionError, OperationalError 18 | from tortoise.indexes import Index 19 | 20 | from aerich import Command 21 | from aerich._compat import tortoise_version_less_than 22 | 23 | if sys.version_info >= (3, 11): 24 | from contextlib import chdir 25 | else: 26 | 27 | class chdir(contextlib.AbstractContextManager): # Copied from source code of Python3.13 28 | """Non thread-safe context manager to change the current working directory.""" 29 | 30 | def __init__(self, path): 31 | self.path = path 32 | self._old_cwd = [] 33 | 34 | def __enter__(self): 35 | self._old_cwd.append(os.getcwd()) 36 | os.chdir(self.path) 37 | 38 | def __exit__(self, *excinfo): 39 | os.chdir(self._old_cwd.pop()) 40 | 41 | 42 | async def drop_db(tortoise_orm) -> None: 43 | # Placing init outside the try-block(suppress) since it doesn't 44 | # establish connections to the DB eagerly. 
45 | await Tortoise.init(config=tortoise_orm) 46 | with contextlib.suppress(DBConnectionError, OperationalError): 47 | await Tortoise._drop_databases() 48 | await Command.aclose() 49 | 50 | 51 | async def init_db(tortoise_orm, generate_schemas=True) -> None: 52 | await drop_db(tortoise_orm) 53 | await Tortoise.init(config=tortoise_orm, _create_db=True) 54 | if generate_schemas: 55 | await generate_schema_for_client(Tortoise.get_connection("default"), safe=True) 56 | await Command.aclose() 57 | 58 | 59 | class Dialect: 60 | test_db_url: str 61 | 62 | @classmethod 63 | def load_env(cls) -> None: 64 | if getattr(cls, "test_db_url", None) is None: 65 | cls.test_db_url = os.getenv("TEST_DB", "") 66 | 67 | @classmethod 68 | def is_postgres(cls) -> bool: 69 | cls.load_env() 70 | return "postgres" in cls.test_db_url 71 | 72 | @classmethod 73 | def is_mysql(cls) -> bool: 74 | cls.load_env() 75 | return "mysql" in cls.test_db_url 76 | 77 | @classmethod 78 | def is_sqlite(cls) -> bool: 79 | cls.load_env() 80 | return not cls.test_db_url or "sqlite" in cls.test_db_url 81 | 82 | @classmethod 83 | def check(cls, name: Literal["sqlite", "mysql", "postgres"]) -> bool: 84 | func = getattr(cls, f"is_{name}") 85 | return func() 86 | 87 | 88 | ASSETS = Path(__file__).parent / "assets" 89 | WINDOWS = platform.system() == "Windows" 90 | 91 | 92 | def run_in_subprocess(command: str, capture_output=True, **kw) -> tuple[bool, str]: 93 | if WINDOWS: 94 | py = Path(sys.executable).as_posix() 95 | if command.startswith("aerich "): 96 | command = f"{py} -m " + command 97 | elif command.startswith(s := "python "): 98 | command = f"{py} " + command[len(s) :] 99 | r = subprocess.run(shlex.split(command), capture_output=capture_output, encoding="utf-8") 100 | ok = r.returncode == 0 101 | out = (r.stdout or "") if ok else (r.stderr or r.stdout or "") 102 | return ok, out 103 | 104 | 105 | def run_shell(command: str, capture_output=True, **kw) -> str: 106 | return run_in_subprocess(command, capture_output, **kw)[1] 107 | 108 | 109 | def _copy_file_with_symlink_target_followed( 110 | src: Path, target_dir: Path | str = ".", parent=ASSETS 111 | ) -> None: 112 | filename = src.name 113 | dst = Path(target_dir, "conftest.py" if filename == "conftest_.py" else filename) 114 | if WINDOWS: 115 | content = src.read_bytes() 116 | if content.startswith(b".."): 117 | shutil.copy(parent / filename, dst) 118 | else: 119 | dst.write_bytes(content) 120 | else: 121 | shutil.copy(src, dst) 122 | 123 | 124 | @functools.cache 125 | def get_symlink_targets(parent: Path = ASSETS) -> set[str]: 126 | return {i.name for i in parent.glob("*.py")} 127 | 128 | 129 | def copy_files(*src_files: Path, target_dir: Path | str = ".", parent: Path | None = None) -> None: 130 | if parent is None: 131 | parent = src_files[0].parent 132 | symlink_targets = get_symlink_targets(parent) 133 | for src in src_files: 134 | if src.name in symlink_targets: 135 | _copy_file_with_symlink_target_followed(src, target_dir, parent) 136 | else: 137 | shutil.copy(src, target_dir) 138 | 139 | 140 | def prepare_py_files( 141 | asset_name: str, assets: Path = ASSETS, suffix: str = ".py", with_testing_models: bool = False 142 | ) -> Path: 143 | asset_dir = assets / asset_name 144 | copy_files(*asset_dir.glob(f"*{suffix}"), parent=assets) 145 | if with_testing_models: 146 | test_dir = assets.parent 147 | copy_files(test_dir / "models_second.py", test_dir / "models.py") 148 | dst_dir = Path("tests") 149 | dst_dir.mkdir() 150 | dst_dir.joinpath("__init__.py").touch() 151 | 
copy_files(test_dir / "_utils.py", test_dir / "indexes.py", target_dir=dst_dir) 152 | return asset_dir 153 | 154 | 155 | def copy_asset(name: str, parent: Path = ASSETS) -> None: 156 | asset_dir = parent / name 157 | symlink_targets = get_symlink_targets(parent) 158 | for p in asset_dir.glob("*"): 159 | filename = p.name 160 | if filename.startswith("."): 161 | continue 162 | if filename in symlink_targets: 163 | _copy_file_with_symlink_target_followed(p, parent=parent) 164 | else: 165 | copy_func = shutil.copytree if p.is_dir() else shutil.copyfile 166 | copy_func(p, "conftest.py" if p.name == "conftest_.py" else p.name) 167 | 168 | 169 | def skip_dialect(name: Literal["sqlite", "mysql", "postgres"]) -> Callable: 170 | return pytest.mark.skipif(Dialect.check(name), reason=f"Skip dialect {name!r}") 171 | 172 | 173 | def requires_dialect( 174 | name: Literal["sqlite", "mysql", "postgres"], 175 | *more: Literal["sqlite", "mysql", "postgres"], 176 | ) -> Callable: 177 | if more: 178 | vals = {name, *more} 179 | for name in vals: 180 | if Dialect.check(name): 181 | return pytest.mark.skipif(False, reason="") 182 | return pytest.mark.skipif(True, reason=f"Capability dialect not in {list(vals)}") 183 | return pytest.mark.skipif(not Dialect.check(name), reason=f"Capability dialect != {name}") 184 | 185 | 186 | def requires_env(name: str) -> Callable: 187 | return pytest.mark.skipif( 188 | not (_v := os.getenv(name)) or _v.lower() not in ("1", "on", "yes", "true"), 189 | reason=f"Skip as os env {name!r} is not true", 190 | ) 191 | 192 | 193 | @contextlib.contextmanager 194 | def tmp_daily_db(env_name="AERICH_DONT_DROP_TMP_DB") -> Generator[None]: 195 | me = Path(__file__) 196 | if not me.is_relative_to(Path.cwd()): 197 | shutil.copy(me, ".") 198 | run_in_subprocess("python db.py drop") 199 | ok, out = run_in_subprocess("python db.py create") 200 | if not ok: 201 | raise OperationalError(out) 202 | try: 203 | yield 204 | finally: 205 | if not os.getenv(env_name): 206 | ok, out = run_in_subprocess("python db.py drop") 207 | if not ok: 208 | raise OperationalError(out) 209 | 210 | 211 | def describe_index(idx: Index) -> Index | dict: 212 | # tortoise-orm>=0.24 changes Index desribe to be dict 213 | if tortoise_version_less_than("0.24"): 214 | return idx 215 | if hasattr(idx, "describe"): 216 | return idx.describe() 217 | return idx 218 | -------------------------------------------------------------------------------- /aerich/inspectdb/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import contextlib 4 | from dataclasses import dataclass 5 | from functools import partial 6 | from typing import Any, Callable, TypedDict 7 | 8 | from tortoise import BaseDBAsyncClient 9 | 10 | from aerich.exceptions import NotSupportError 11 | 12 | 13 | class ColumnInfoDict(TypedDict): 14 | name: str 15 | pk: str 16 | index: str 17 | null: str 18 | default: str 19 | length: str 20 | comment: str 21 | 22 | 23 | FieldMapDict = dict[str, Callable[..., str]] 24 | 25 | 26 | @dataclass 27 | class Column: 28 | name: str 29 | data_type: str 30 | null: bool 31 | default: Any 32 | pk: bool 33 | unique: bool 34 | index: bool 35 | comment: str | None = None 36 | length: int | None = None 37 | extra: str | None = None 38 | decimal_places: int | None = None 39 | max_digits: int | None = None 40 | 41 | @staticmethod 42 | def trans_default(value: str, data_type: str, extra: str | None) -> str: 43 | if data_type in ("tinyint", "INT"): 44 | default 
= f"default={'True' if value == '1' else 'False'}, " 45 | elif data_type == "bool": 46 | default = f"default={'True' if value == 'true' else 'False'}, " 47 | elif data_type in ("datetime", "timestamptz", "TIMESTAMP"): 48 | if value == "CURRENT_TIMESTAMP": 49 | if extra == "DEFAULT_GENERATED on update CURRENT_TIMESTAMP": 50 | default = "auto_now=True, " 51 | else: 52 | default = "auto_now_add=True, " 53 | else: 54 | default = "" 55 | else: 56 | if "::" in value: 57 | default = f"default={value.split('::')[0]}, " 58 | elif value.endswith("()"): 59 | default = "" 60 | elif value == "": 61 | default = 'default="", ' 62 | else: 63 | default = f"default={value}, " 64 | return default 65 | 66 | def translate(self) -> ColumnInfoDict: 67 | comment = default = length = index = null = pk = "" 68 | if self.pk: 69 | pk = "primary_key=True, " 70 | else: 71 | if self.unique: 72 | index = "unique=True, " 73 | elif self.index: 74 | index = "db_index=True, " 75 | if self.default is not None: 76 | default = self.trans_default(self.default, self.data_type, self.extra) 77 | if self.data_type in ("varchar", "VARCHAR"): 78 | length = f"max_length={self.length}, " 79 | elif self.data_type in ("decimal", "numeric"): 80 | length_parts = [] 81 | if self.max_digits: 82 | length_parts.append(f"max_digits={self.max_digits}") 83 | if self.decimal_places: 84 | length_parts.append(f"decimal_places={self.decimal_places}") 85 | if length_parts: 86 | length = ", ".join(length_parts) + ", " 87 | if self.null: 88 | null = "null=True, " 89 | if self.comment: 90 | comment = f"description='{self.comment}', " 91 | return { 92 | "name": self.name, 93 | "pk": pk, 94 | "index": index, 95 | "null": null, 96 | "default": default, 97 | "length": length, 98 | "comment": comment, 99 | } 100 | 101 | 102 | class Inspect: 103 | _table_template = "class {table}(Model):\n" 104 | 105 | def __init__( 106 | self, 107 | conn: BaseDBAsyncClient, 108 | tables: list[str] | None = None, 109 | special_fields: dict[str, str] | None = None, 110 | ) -> None: 111 | self.conn = conn 112 | with contextlib.suppress(AttributeError): 113 | self.database = conn.database # type:ignore[attr-defined] 114 | self.tables = tables 115 | self._special_fields = special_fields 116 | 117 | @property 118 | def field_map(self) -> FieldMapDict: 119 | raise NotImplementedError 120 | 121 | async def inspect(self) -> str: 122 | if not self.tables: 123 | self.tables = await self.get_all_tables() 124 | imports = ["from tortoise import Model, fields"] 125 | tables = [] 126 | for table in self.tables: 127 | columns = await self.get_columns(table) 128 | fields = [] 129 | model = self._table_template.format(table=table.title().replace("_", "")) 130 | for column in columns: 131 | try: 132 | trans_func = self.field_map[column.data_type] 133 | except KeyError as e: 134 | if not self._special_fields or column.data_type not in self._special_fields: 135 | raise NotSupportError( 136 | f"Can't translate {column.data_type=} to be tortoise field" 137 | ) from e 138 | field_class = self._special_fields[column.data_type] 139 | is_normal_field = True 140 | if "." 
in field_class: # e.g.: tortoise.contrib.mysql.fields.GeometryField 141 | module, field_class = field_class.rsplit(".", 1) 142 | if module != "fields": 143 | imports.append(f"from {module} import {field_class}") 144 | is_normal_field = False 145 | trans_func = partial( 146 | self.get_field_string, field_class, is_normal_field=is_normal_field 147 | ) 148 | field = trans_func(**column.translate()) 149 | fields.append(" " + field) 150 | tables.append(model + "\n".join(fields)) 151 | result = "\n".join(imports) + "\n\n" 152 | return result + "\n\n\n".join(tables) 153 | 154 | async def get_columns(self, table: str) -> list[Column]: 155 | raise NotImplementedError 156 | 157 | async def get_all_tables(self) -> list[str]: 158 | raise NotImplementedError 159 | 160 | @staticmethod 161 | def get_field_string( 162 | field_class: str, 163 | arguments: str = "{null}{default}{comment}", 164 | is_normal_field: bool = True, 165 | **kwargs, 166 | ) -> str: 167 | name = kwargs["name"] 168 | field_params = arguments.format(**kwargs).strip().rstrip(",") 169 | if is_normal_field: 170 | field_class = "fields." + field_class 171 | return f"{name} = {field_class}({field_params})" 172 | 173 | @classmethod 174 | def decimal_field(cls, **kwargs) -> str: 175 | return cls.get_field_string("DecimalField", **kwargs) 176 | 177 | @classmethod 178 | def time_field(cls, **kwargs) -> str: 179 | return cls.get_field_string("TimeField", **kwargs) 180 | 181 | @classmethod 182 | def date_field(cls, **kwargs) -> str: 183 | return cls.get_field_string("DateField", **kwargs) 184 | 185 | @classmethod 186 | def float_field(cls, **kwargs) -> str: 187 | return cls.get_field_string("FloatField", **kwargs) 188 | 189 | @classmethod 190 | def datetime_field(cls, **kwargs) -> str: 191 | return cls.get_field_string("DatetimeField", **kwargs) 192 | 193 | @classmethod 194 | def text_field(cls, **kwargs) -> str: 195 | return cls.get_field_string("TextField", **kwargs) 196 | 197 | @classmethod 198 | def char_field(cls, **kwargs) -> str: 199 | arguments = "{pk}{index}{length}{null}{default}{comment}" 200 | return cls.get_field_string("CharField", arguments, **kwargs) 201 | 202 | @classmethod 203 | def int_field(cls, field_class="IntField", **kwargs) -> str: 204 | arguments = "{pk}{index}{default}{comment}" 205 | return cls.get_field_string(field_class, arguments, **kwargs) 206 | 207 | @classmethod 208 | def smallint_field(cls, **kwargs) -> str: 209 | return cls.int_field("SmallIntField", **kwargs) 210 | 211 | @classmethod 212 | def bigint_field(cls, **kwargs) -> str: 213 | return cls.int_field("BigIntField", **kwargs) 214 | 215 | @classmethod 216 | def bool_field(cls, **kwargs) -> str: 217 | return cls.get_field_string("BooleanField", **kwargs) 218 | 219 | @classmethod 220 | def uuid_field(cls, **kwargs) -> str: 221 | arguments = "{pk}{index}{default}{comment}" 222 | return cls.get_field_string("UUIDField", arguments, **kwargs) 223 | 224 | @classmethod 225 | def json_field(cls, **kwargs) -> str: 226 | return cls.get_field_string("JSONField", **kwargs) 227 | 228 | @classmethod 229 | def binary_field(cls, **kwargs) -> str: 230 | return cls.get_field_string("BinaryField", **kwargs) 231 | -------------------------------------------------------------------------------- /README_RU.md: -------------------------------------------------------------------------------- 1 | # Aerich 2 | 3 | [![image](https://img.shields.io/pypi/v/aerich.svg?style=flat)](https://pypi.python.org/pypi/aerich) 4 | 
[![image](https://img.shields.io/github/license/tortoise/aerich)](https://github.com/tortoise/aerich) 5 | [![image](https://github.com/tortoise/aerich/workflows/pypi/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:pypi) 6 | [![image](https://github.com/tortoise/aerich/workflows/ci/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:ci) 7 | 8 | [English](./README.md) | Русский 9 | 10 | ## Введение 11 | 12 | Aerich - это инструмент для миграции базы данных для TortoiseORM, который аналогичен Alembic для SQLAlchemy или встроенному решению миграций в Django ORM. 13 | 14 | ## Установка 15 | 16 | Просто установите из pypi: 17 | 18 | ```shell 19 | pip install aerich 20 | ``` 21 | 22 | ## Быстрый старт 23 | 24 | ```shell 25 | > aerich -h 26 | 27 | Usage: aerich [OPTIONS] COMMAND [ARGS]... 28 | 29 | Options: 30 | -V, --version Show the version and exit. 31 | -c, --config TEXT Config file. [default: pyproject.toml] 32 | --app TEXT Tortoise-ORM app name. 33 | -h, --help Show this message and exit. 34 | 35 | Commands: 36 | downgrade Downgrade to specified version. 37 | heads Show current available heads in migrate location. 38 | history List all migrate items. 39 | init Init config file and generate root migrate location. 40 | init-db Generate schema and generate app migrate location. 41 | inspectdb Introspects the database tables to standard output as... 42 | migrate Generate migrate changes file. 43 | upgrade Upgrade to specified version. 44 | ``` 45 | 46 | ## Использование 47 | 48 | Сначала вам нужно добавить aerich.models в конфигурацию вашего Tortoise-ORM. Пример: 49 | 50 | ```python 51 | TORTOISE_ORM = { 52 | "connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"}, 53 | "apps": { 54 | "models": { 55 | "models": ["tests.models", "aerich.models"], 56 | "default_connection": "default", 57 | }, 58 | }, 59 | } 60 | ``` 61 | 62 | ### Инициализация 63 | 64 | ```shell 65 | > aerich init -h 66 | 67 | Usage: aerich init [OPTIONS] 68 | 69 | Init config file and generate root migrate location. 70 | 71 | Options: 72 | -t, --tortoise-orm TEXT Tortoise-ORM config module dict variable, like 73 | settings.TORTOISE_ORM. [required] 74 | --location TEXT Migrate store location. [default: ./migrations] 75 | -s, --src_folder TEXT Folder of the source, relative to the project root. 76 | -h, --help Show this message and exit. 77 | ``` 78 | 79 | Инициализируйте файл конфигурации и задайте местоположение миграций: 80 | 81 | ```shell 82 | > aerich init -t tests.backends.mysql.TORTOISE_ORM 83 | 84 | Success create migrate location ./migrations 85 | Success write config to pyproject.toml 86 | ``` 87 | 88 | ### Инициализация базы данных 89 | 90 | ```shell 91 | > aerich init-db 92 | 93 | Success create app migrate location ./migrations/models 94 | Success generate schema for app "models" 95 | ``` 96 | 97 | Если ваше приложение Tortoise-ORM не является приложением по умолчанию с именем models, вы должны указать правильное имя приложения с помощью параметра --app, например: aerich --app other_models init-db. 98 | 99 | ### Обновление моделей и создание миграции 100 | 101 | ```shell 102 | > aerich migrate --name drop_column 103 | 104 | Success migrate 1_202029051520102929_drop_column.py 105 | ``` 106 | 107 | Формат имени файла миграции следующий: `{версия}_{дата_и_время}_{имя|обновление}.py`. 108 | 109 | Если aerich предполагает, что вы переименовываете столбец, он спросит: 110 | Переименовать `{старый_столбец} в {новый_столбец} [True]`. 
Вы можете выбрать `True`, 111 | чтобы переименовать столбец без удаления столбца, или выбрать `False`, чтобы удалить столбец, 112 | а затем создать новый. Обратите внимание, что последний вариант может привести к потере данных. 113 | 114 | 115 | ### Обновление до последней версии 116 | 117 | ```shell 118 | > aerich upgrade 119 | 120 | Success upgrade 1_202029051520102929_drop_column.py 121 | ``` 122 | 123 | Теперь ваша база данных обновлена до последней версии. 124 | 125 | ### Откат до указанной версии 126 | 127 | ```shell 128 | > aerich downgrade -h 129 | 130 | Usage: aerich downgrade [OPTIONS] 131 | 132 | Downgrade to specified version. 133 | 134 | Options: 135 | -v, --version INTEGER Specified version, default to last. [default: -1] 136 | -d, --delete Delete version files at the same time. [default: 137 | False] 138 | 139 | --yes Confirm the action without prompting. 140 | -h, --help Show this message and exit. 141 | ``` 142 | 143 | ```shell 144 | > aerich downgrade 145 | 146 | Success downgrade 1_202029051520102929_drop_column.py 147 | ``` 148 | 149 | Теперь ваша база данных откатилась до указанной версии. 150 | 151 | ### Показать историю 152 | 153 | ```shell 154 | > aerich history 155 | 156 | 1_202029051520102929_drop_column.py 157 | ``` 158 | 159 | ### Чтобы узнать, какие миграции должны быть применены, можно использовать команду: 160 | 161 | ```shell 162 | > aerich heads 163 | 164 | 1_202029051520102929_drop_column.py 165 | ``` 166 | 167 | ### Осмотр таблиц базы данных для модели TortoiseORM 168 | 169 | В настоящее время inspectdb поддерживает MySQL, Postgres и SQLite. 170 | 171 | ```shell 172 | Usage: aerich inspectdb [OPTIONS] 173 | 174 | Introspects the database tables to standard output as TortoiseORM model. 175 | 176 | Options: 177 | -t, --table TEXT Which tables to inspect. 178 | -h, --help Show this message and exit. 
179 | ``` 180 | 181 | Посмотреть все таблицы и вывести их на консоль: 182 | 183 | ```shell 184 | aerich --app models inspectdb 185 | ``` 186 | 187 | Осмотреть указанную таблицу в приложении по умолчанию и перенаправить в models.py: 188 | 189 | ```shell 190 | aerich inspectdb -t user > models.py 191 | ``` 192 | 193 | Например, ваша таблица выглядит следующим образом: 194 | 195 | ```sql 196 | CREATE TABLE `test` 197 | ( 198 | `id` int NOT NULL AUTO_INCREMENT, 199 | `decimal` decimal(10, 2) NOT NULL, 200 | `date` date DEFAULT NULL, 201 | `datetime` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, 202 | `time` time DEFAULT NULL, 203 | `float` float DEFAULT NULL, 204 | `string` varchar(200) COLLATE utf8mb4_general_ci DEFAULT NULL, 205 | `tinyint` tinyint DEFAULT NULL, 206 | PRIMARY KEY (`id`), 207 | KEY `asyncmy_string_index` (`string`) 208 | ) ENGINE = InnoDB 209 | DEFAULT CHARSET = utf8mb4 210 | COLLATE = utf8mb4_general_ci 211 | ``` 212 | 213 | Теперь выполните команду aerich inspectdb -t test, чтобы увидеть сгенерированную модель: 214 | 215 | ```python 216 | from tortoise import Model, fields 217 | 218 | 219 | class Test(Model): 220 | date = fields.DateField(null=True, ) 221 | datetime = fields.DatetimeField(auto_now=True, ) 222 | decimal = fields.DecimalField(max_digits=10, decimal_places=2, ) 223 | float = fields.FloatField(null=True, ) 224 | id = fields.IntField(pk=True, ) 225 | string = fields.CharField(max_length=200, null=True, ) 226 | time = fields.TimeField(null=True, ) 227 | tinyint = fields.BooleanField(null=True, ) 228 | ``` 229 | 230 | Обратите внимание, что эта команда имеет ограничения и не может автоматически определить некоторые поля, такие как `IntEnumField`, `ForeignKeyField` и другие. 231 | 232 | ### Несколько баз данных 233 | 234 | ```python 235 | tortoise_orm = { 236 | "connections": { 237 | "default": expand_db_url(db_url, True), 238 | "second": expand_db_url(db_url_second, True), 239 | }, 240 | "apps": { 241 | "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"}, 242 | "models_second": {"models": ["tests.models_second"], "default_connection": "second", }, 243 | }, 244 | } 245 | ``` 246 | 247 | Вам нужно указать `aerich.models` только в одном приложении и должны указывать `--app` при запуске команды `aerich migrate` и т.д. 248 | 249 | ## Восстановление рабочего процесса aerich 250 | 251 | В некоторых случаях, например, при возникновении проблем после обновления `aerich`, вы не можете запустить `aerich migrate` или `aerich upgrade`. В таком случае вы можете выполнить следующие шаги: 252 | 253 | 1. удалите таблицы `aerich`. 254 | 2. удалите директорию `migrations/{app}`. 255 | 3. rerun `aerich init-db`. 256 | 257 | Обратите внимание, что эти действия безопасны, и вы можете использовать их для сброса миграций, если у вас слишком много файлов миграции. 258 | 259 | ## Использование aerich в приложении 260 | 261 | Вы можете использовать `aerich` вне командной строки, используя класс `Command`. 262 | 263 | ```python 264 | from aerich import Command 265 | 266 | command = Command(tortoise_config=config, app='models') 267 | await command.init() 268 | await command.migrate('test') 269 | ``` 270 | 271 | ## Лицензия 272 | 273 | Этот проект лицензирован в соответствии с лицензией 274 | [Apache-2.0](https://github.com/long2ice/aerich/blob/master/LICENSE) Лицензия. 
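For reference, here is a slightly fuller sketch of the programmatic workflow described in the "Использование aerich в приложении" section above. The `Command` class, its `init()` and `migrate()` methods, and `Command.aclose()` all appear elsewhere in this repository; the `upgrade()` call and the sqlite URL below are illustrative assumptions, so check them against your aerich version before relying on them. The sketch also assumes `aerich init-db` has already been run for this app.

```python
import asyncio

from aerich import Command

# Illustrative config; replace the sqlite URL and model modules with your own.
TORTOISE_ORM = {
    "connections": {"default": "sqlite://db.sqlite3"},
    "apps": {
        "models": {
            "models": ["tests.models", "aerich.models"],
            "default_connection": "default",
        },
    },
}


async def run_migrations() -> None:
    command = Command(tortoise_config=TORTOISE_ORM, app="models")
    await command.init()
    # Generate a migration file from detected model changes (a no-op if nothing changed).
    await command.migrate("update")
    # Apply pending migrations; assumed to mirror the `aerich upgrade` CLI command.
    await command.upgrade()
    # Close the database connections opened by aerich.
    await Command.aclose()


if __name__ == "__main__":
    asyncio.run(run_migrations())
```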
-------------------------------------------------------------------------------- /tests/test_ddl.py: -------------------------------------------------------------------------------- 1 | import tortoise 2 | 3 | from aerich.ddl.mysql import MysqlDDL 4 | from aerich.ddl.postgres import PostgresDDL 5 | from aerich.ddl.sqlite import SqliteDDL 6 | from aerich.migrate import Migrate 7 | from tests.models import Category, Product, User 8 | 9 | 10 | def test_create_table(): 11 | ret = Migrate.ddl.create_table(Category) 12 | if isinstance(Migrate.ddl, MysqlDDL): 13 | if tortoise.__version__ >= "0.24": 14 | assert ( 15 | ret 16 | == """CREATE TABLE IF NOT EXISTS `category` ( 17 | `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT, 18 | `slug` VARCHAR(100) NOT NULL, 19 | `name` VARCHAR(200), 20 | `title` VARCHAR(20) NOT NULL, 21 | `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), 22 | `owner_id` INT NOT NULL COMMENT 'User', 23 | CONSTRAINT `fk_category_user_110d4c63` FOREIGN KEY (`owner_id`) REFERENCES `user` (`id`) ON DELETE CASCADE, 24 | FULLTEXT KEY `idx_category_slug_e9bcff` (`slug`) 25 | ) CHARACTER SET utf8mb4""" 26 | ) 27 | return 28 | assert ( 29 | ret 30 | == """CREATE TABLE IF NOT EXISTS `category` ( 31 | `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT, 32 | `slug` VARCHAR(100) NOT NULL, 33 | `name` VARCHAR(200), 34 | `title` VARCHAR(20) NOT NULL, 35 | `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), 36 | `owner_id` INT NOT NULL COMMENT 'User', 37 | CONSTRAINT `fk_category_user_110d4c63` FOREIGN KEY (`owner_id`) REFERENCES `user` (`id`) ON DELETE CASCADE 38 | ) CHARACTER SET utf8mb4; 39 | CREATE FULLTEXT INDEX `idx_category_slug_e9bcff` ON `category` (`slug`)""" 40 | ) 41 | 42 | elif isinstance(Migrate.ddl, SqliteDDL): 43 | exists = "IF NOT EXISTS " if tortoise.__version__ >= "0.24" else "" 44 | assert ( 45 | ret 46 | == f"""CREATE TABLE IF NOT EXISTS "category" ( 47 | "id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, 48 | "slug" VARCHAR(100) NOT NULL, 49 | "name" VARCHAR(200), 50 | "title" VARCHAR(20) NOT NULL, 51 | "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, 52 | "owner_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE /* User */ 53 | ); 54 | CREATE INDEX {exists}"idx_category_slug_e9bcff" ON "category" ("slug")""" 55 | ) 56 | 57 | elif isinstance(Migrate.ddl, PostgresDDL): 58 | assert ( 59 | ret 60 | == """CREATE TABLE IF NOT EXISTS "category" ( 61 | "id" SERIAL NOT NULL PRIMARY KEY, 62 | "slug" VARCHAR(100) NOT NULL, 63 | "name" VARCHAR(200), 64 | "title" VARCHAR(20) NOT NULL, 65 | "created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP, 66 | "owner_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE 67 | ); 68 | CREATE INDEX IF NOT EXISTS "idx_category_slug_e9bcff" ON "category" USING HASH ("slug"); 69 | COMMENT ON COLUMN "category"."owner_id" IS 'User'""" 70 | ) 71 | 72 | 73 | def test_drop_table(): 74 | ret = Migrate.ddl.drop_table(Category._meta.db_table) 75 | if isinstance(Migrate.ddl, MysqlDDL): 76 | assert ret == "DROP TABLE IF EXISTS `category`" 77 | else: 78 | assert ret == 'DROP TABLE IF EXISTS "category"' 79 | 80 | 81 | def test_add_column(): 82 | ret = Migrate.ddl.add_column(Category, Category._meta.fields_map["name"].describe(False)) 83 | if isinstance(Migrate.ddl, MysqlDDL): 84 | assert ret == "ALTER TABLE `category` ADD `name` VARCHAR(200)" 85 | else: 86 | assert ret == 'ALTER TABLE "category" ADD "name" VARCHAR(200)' 87 | # add unique column 88 | ret = Migrate.ddl.add_column(User, 
User._meta.fields_map["username"].describe(False)) 89 | if isinstance(Migrate.ddl, MysqlDDL): 90 | assert ret == "ALTER TABLE `user` ADD `username` VARCHAR(20) NOT NULL UNIQUE" 91 | elif isinstance(Migrate.ddl, PostgresDDL): 92 | assert ret == 'ALTER TABLE "user" ADD "username" VARCHAR(20) NOT NULL UNIQUE' 93 | else: 94 | assert ret == 'ALTER TABLE "user" ADD "username" VARCHAR(20) NOT NULL' 95 | 96 | 97 | def test_modify_column(): 98 | if isinstance(Migrate.ddl, SqliteDDL): 99 | return 100 | 101 | ret0 = Migrate.ddl.modify_column(Category, Category._meta.fields_map["name"].describe(False)) 102 | ret1 = Migrate.ddl.modify_column(User, User._meta.fields_map["is_active"].describe(False)) 103 | if isinstance(Migrate.ddl, MysqlDDL): 104 | assert ret0 == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)" 105 | assert ( 106 | ret1 107 | == "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1" 108 | ) 109 | elif isinstance(Migrate.ddl, PostgresDDL): 110 | assert ( 111 | ret0 112 | == 'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200) USING "name"::VARCHAR(200)' 113 | ) 114 | 115 | assert ( 116 | ret1 == 'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL' 117 | ) 118 | 119 | 120 | def test_alter_column_default(): 121 | if isinstance(Migrate.ddl, SqliteDDL): 122 | return 123 | ret = Migrate.ddl.alter_column_default(User, User._meta.fields_map["intro"].describe(False)) 124 | if isinstance(Migrate.ddl, PostgresDDL): 125 | assert ret == 'ALTER TABLE "user" ALTER COLUMN "intro" SET DEFAULT \'\'' 126 | elif isinstance(Migrate.ddl, MysqlDDL): 127 | assert ret == "ALTER TABLE `user` ALTER COLUMN `intro` SET DEFAULT ''" 128 | 129 | ret = Migrate.ddl.alter_column_default( 130 | Category, Category._meta.fields_map["created_at"].describe(False) 131 | ) 132 | if isinstance(Migrate.ddl, PostgresDDL): 133 | assert ( 134 | ret == 'ALTER TABLE "category" ALTER COLUMN "created_at" SET DEFAULT CURRENT_TIMESTAMP' 135 | ) 136 | elif isinstance(Migrate.ddl, MysqlDDL): 137 | assert ( 138 | ret 139 | == "ALTER TABLE `category` ALTER COLUMN `created_at` SET DEFAULT CURRENT_TIMESTAMP(6)" 140 | ) 141 | 142 | ret = Migrate.ddl.alter_column_default( 143 | Product, Product._meta.fields_map["view_num"].describe(False) 144 | ) 145 | if isinstance(Migrate.ddl, PostgresDDL): 146 | assert ret == 'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0' 147 | elif isinstance(Migrate.ddl, MysqlDDL): 148 | assert ret == "ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0" 149 | 150 | 151 | def test_alter_column_null(): 152 | if isinstance(Migrate.ddl, (SqliteDDL, MysqlDDL)): 153 | return 154 | ret = Migrate.ddl.alter_column_null(Category, Category._meta.fields_map["name"].describe(False)) 155 | if isinstance(Migrate.ddl, PostgresDDL): 156 | assert ret == 'ALTER TABLE "category" ALTER COLUMN "name" DROP NOT NULL' 157 | 158 | 159 | def test_set_comment(): 160 | if isinstance(Migrate.ddl, (SqliteDDL, MysqlDDL)): 161 | return 162 | ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map["name"].describe(False)) 163 | assert ret == 'COMMENT ON COLUMN "category"."name" IS NULL' 164 | 165 | ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map["owner"].describe(False)) 166 | assert ret == 'COMMENT ON COLUMN "category"."owner_id" IS \'User\'' 167 | 168 | 169 | def test_drop_column(): 170 | ret = Migrate.ddl.drop_column(Category, "name") 171 | if isinstance(Migrate.ddl, MysqlDDL): 172 | assert ret == "ALTER TABLE `category` 
DROP COLUMN `name`" 173 | elif isinstance(Migrate.ddl, PostgresDDL): 174 | assert ret == 'ALTER TABLE "category" DROP COLUMN "name"' 175 | 176 | 177 | def test_add_index(): 178 | index = Migrate.ddl.add_index(Category, ["name"]) 179 | index_u = Migrate.ddl.add_index(Category, ["name"], True) 180 | if isinstance(Migrate.ddl, MysqlDDL): 181 | assert index == "ALTER TABLE `category` ADD INDEX `idx_category_name_8b0cb9` (`name`)" 182 | assert index_u == "ALTER TABLE `category` ADD UNIQUE INDEX `name` (`name`)" 183 | elif isinstance(Migrate.ddl, PostgresDDL): 184 | assert ( 185 | index == 'CREATE INDEX IF NOT EXISTS "idx_category_name_8b0cb9" ON "category" ("name")' 186 | ) 187 | assert ( 188 | index_u 189 | == 'CREATE UNIQUE INDEX IF NOT EXISTS "uid_category_name_8b0cb9" ON "category" ("name")' 190 | ) 191 | else: 192 | assert index == 'CREATE INDEX "idx_category_name_8b0cb9" ON "category" ("name")' 193 | assert index_u == 'CREATE UNIQUE INDEX "uid_category_name_8b0cb9" ON "category" ("name")' 194 | 195 | 196 | def test_drop_index(): 197 | ret = Migrate.ddl.drop_index(Category, ["name"]) 198 | ret_u = Migrate.ddl.drop_index(Category, ["name"], True) 199 | if isinstance(Migrate.ddl, MysqlDDL): 200 | assert ret == "ALTER TABLE `category` DROP INDEX `idx_category_name_8b0cb9`" 201 | assert ret_u == "ALTER TABLE `category` DROP INDEX `name`" 202 | else: 203 | assert ret == 'DROP INDEX IF EXISTS "idx_category_name_8b0cb9"' 204 | assert ret_u == 'DROP INDEX IF EXISTS "uid_category_name_8b0cb9"' 205 | 206 | 207 | def test_add_fk(): 208 | ret = Migrate.ddl.add_fk( 209 | Category, Category._meta.fields_map["owner"].describe(False), User.describe(False) 210 | ) 211 | if isinstance(Migrate.ddl, MysqlDDL): 212 | assert ( 213 | ret 214 | == "ALTER TABLE `category` ADD CONSTRAINT `fk_category_user_110d4c63` FOREIGN KEY (`owner_id`) REFERENCES `user` (`id`) ON DELETE CASCADE" 215 | ) 216 | else: 217 | assert ( 218 | ret 219 | == 'ALTER TABLE "category" ADD CONSTRAINT "fk_category_user_110d4c63" FOREIGN KEY ("owner_id") REFERENCES "user" ("id") ON DELETE CASCADE' 220 | ) 221 | 222 | 223 | def test_drop_fk(): 224 | ret = Migrate.ddl.drop_fk( 225 | Category, Category._meta.fields_map["owner"].describe(False), User.describe(False) 226 | ) 227 | if isinstance(Migrate.ddl, MysqlDDL): 228 | assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_110d4c63`" 229 | elif isinstance(Migrate.ddl, PostgresDDL): 230 | assert ret == 'ALTER TABLE "category" DROP CONSTRAINT IF EXISTS "fk_category_user_110d4c63"' 231 | else: 232 | assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_110d4c63"' 233 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Aerich 2 | 3 | [![image](https://img.shields.io/pypi/v/aerich.svg?style=flat)](https://pypi.python.org/pypi/aerich) 4 | [![image](https://img.shields.io/github/license/tortoise/aerich)](https://github.com/tortoise/aerich) 5 | [![image](https://github.com/tortoise/aerich/workflows/pypi/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:pypi) 6 | [![image](https://github.com/tortoise/aerich/workflows/ci/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:ci) 7 | ![Python Versions](https://img.shields.io/pypi/pyversions/aerich) 8 | 9 | English | [Русский](./README_RU.md) 10 | 11 | ## Introduction 12 | 13 | Aerich is a database migrations tool for TortoiseORM, which is like alembic for 
SQLAlchemy, 14 | or like the Django ORM with its own migration solution. 15 | 16 | ## Install 17 | 18 | Just install from PyPI: 19 | 20 | ```shell 21 | pip install "aerich[toml]" 22 | ``` 23 | 24 | Or install the latest version directly from *GitHub* with the 25 | following command: 26 | 27 | ```shell 28 | pip install "aerich[toml] @git+https://github.com/tortoise/aerich" 29 | ``` 30 | 31 | ## Quick Start 32 | 33 | ```shell 34 | > aerich -h 35 | 36 | Usage: aerich [OPTIONS] COMMAND [ARGS]... 37 | 38 | Options: 39 | -V, --version Show the version and exit. 40 | -c, --config TEXT Config file. [default: pyproject.toml] 41 | --app TEXT Tortoise-ORM app name. 42 | -h, --help Show this message and exit. 43 | 44 | Commands: 45 | downgrade Downgrade to specified version. 46 | fix-migrations Fix migration files to include models state for aerich... 47 | heads Show current available heads in migrate location. 48 | history List all migrate items. 49 | init Init config file and generate root migrate location. 50 | init-db Generate schema and generate app migrate location. 51 | init-migrations Generate app migration folder and your first migration. 52 | inspectdb Introspects the database tables to standard output as... 53 | migrate Generate migrate changes file. 54 | upgrade Upgrade to specified version. 55 | ``` 56 | 57 | ## Usage 58 | 59 | You need to add `aerich.models` to your `Tortoise-ORM` config first. Example: 60 | 61 | ```python 62 | TORTOISE_ORM = { 63 | "connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"}, 64 | "apps": { 65 | "models": { 66 | "models": ["tests.models", "aerich.models"], 67 | "default_connection": "default", 68 | }, 69 | }, 70 | } 71 | ``` 72 | 73 | ### Initialization 74 | 75 | ```shell 76 | > aerich init -h 77 | 78 | Usage: aerich init [OPTIONS] 79 | 80 | Init config file and generate root migrate location. 81 | 82 | Options: 83 | -t, --tortoise-orm TEXT Tortoise-ORM config module dict variable, like 84 | settings.TORTOISE_ORM. [required] 85 | --location TEXT Migrate store location. [default: ./migrations] 86 | -s, --src_folder TEXT Folder of the source, relative to the project root. 87 | -h, --help Show this message and exit. 88 | ``` 89 | 90 | Initialize the config file and migrations location: 91 | 92 | ```shell 93 | > aerich init -t tests.backends.mysql.TORTOISE_ORM 94 | 95 | Success create migrate location ./migrations 96 | Success write config to pyproject.toml 97 | ``` 98 | 99 | *Note*: aerich will import the config file when running the init-db/migrate/upgrade/heads/history commands, so it is better to keep this file simple and clean. 100 | 101 | To apply a per-app migrations style (like Django), set the location option with a '{app}' placeholder, such as: `--location "./{app}/migrations"` 102 | 103 | ### Init db 104 | 105 | ```shell 106 | > aerich init-db 107 | 108 | Success create app migrate location ./migrations/models 109 | Success generate schema for app "models" 110 | ``` 111 | 112 | If your Tortoise-ORM app is not the default `models`, you must specify the correct app via `--app`, 113 | e.g. `aerich --app other_models init-db`. 114 | 115 | ### Update models and make migrate 116 | 117 | ```shell 118 | > aerich migrate --name drop_column 119 | 120 | Success migrate 1_202029051520102929_drop_column.py 121 | ``` 122 | 123 | The format of the migration filename is 124 | `{version_num}_{datetime}_{name|update}.py`. 125 | 126 | If `aerich` guesses you are renaming a column, it will ask `Rename {old_column} to {new_column} [True]`.
You can choose 127 | `True` to rename the column without dropping it, or choose `False` to drop the column and then create a new one. Note that the latter may 128 | lose data. 129 | 130 | If you need to write a migration manually, you can generate an empty file: 131 | 132 | ```shell 133 | > aerich migrate --name add_index --empty 134 | 135 | Success migrate 1_202326122220101229_add_index.py 136 | ``` 137 | 138 | ### Upgrade to latest version 139 | 140 | ```shell 141 | > aerich upgrade 142 | 143 | Success upgrade 1_202029051520102929_drop_column.py 144 | ``` 145 | 146 | Now your db is migrated to the latest version. 147 | 148 | ### Downgrade to specified version 149 | 150 | ```shell 151 | > aerich downgrade -h 152 | 153 | Usage: aerich downgrade [OPTIONS] 154 | 155 | Downgrade to specified version. 156 | 157 | Options: 158 | -v, --version INTEGER Specified version, default to last. [default: -1] 159 | -d, --delete Delete version files at the same time. [default: 160 | False] 161 | 162 | --yes Confirm the action without prompting. 163 | -h, --help Show this message and exit. 164 | ``` 165 | 166 | ```shell 167 | > aerich downgrade 168 | 169 | Success downgrade 1_202029051520102929_drop_column.py 170 | ``` 171 | 172 | Now your db is rolled back to the specified version. 173 | 174 | ### Show history 175 | 176 | ```shell 177 | > aerich history 178 | 179 | 1_202029051520102929_drop_column.py 180 | ``` 181 | 182 | ### Show heads to be migrated 183 | 184 | ```shell 185 | > aerich heads 186 | 187 | 1_202029051520102929_drop_column.py 188 | ``` 189 | 190 | ### Inspect db tables to TortoiseORM model 191 | 192 | Currently `inspectdb` supports MySQL, Postgres, and SQLite. 193 | 194 | ```shell 195 | Usage: aerich inspectdb [OPTIONS] 196 | 197 | Introspects the database tables to standard output as TortoiseORM model. 198 | 199 | Options: 200 | -t, --table TEXT Which tables to inspect. 201 | -h, --help Show this message and exit. 202 | ``` 203 | 204 | Inspect all tables and print to console: 205 | 206 | ```shell 207 | aerich --app models inspectdb 208 | ``` 209 | 210 | Inspect a specified table in the default app and redirect to `models.py`: 211 | 212 | ```shell 213 | aerich inspectdb -t user > models.py 214 | ``` 215 | 216 | For example, suppose your table is: 217 | 218 | ```sql 219 | CREATE TABLE `test` 220 | ( 221 | `id` int NOT NULL AUTO_INCREMENT, 222 | `decimal` decimal(10, 2) NOT NULL, 223 | `date` date DEFAULT NULL, 224 | `datetime` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, 225 | `time` time DEFAULT NULL, 226 | `float` float DEFAULT NULL, 227 | `string` varchar(200) COLLATE utf8mb4_general_ci DEFAULT NULL, 228 | `tinyint` tinyint DEFAULT NULL, 229 | PRIMARY KEY (`id`), 230 | KEY `asyncmy_string_index` (`string`) 231 | ) ENGINE = InnoDB 232 | DEFAULT CHARSET = utf8mb4 233 | COLLATE = utf8mb4_general_ci 234 | ``` 235 | 236 | Now run `aerich inspectdb -t test` to see the generated model: 237 | 238 | ```python 239 | from tortoise import Model, fields 240 | 241 | 242 | class Test(Model): 243 | date = fields.DateField(null=True) 244 | datetime = fields.DatetimeField(auto_now=True) 245 | decimal = fields.DecimalField(max_digits=10, decimal_places=2) 246 | float = fields.FloatField(null=True) 247 | id = fields.IntField(primary_key=True) 248 | string = fields.CharField(max_length=200, null=True) 249 | time = fields.TimeField(null=True) 250 | tinyint = fields.BooleanField(null=True) 251 | ``` 252 | 253 | Note that this command is limited and can't infer some fields, such as `IntEnumField`, `ForeignKeyField`, and others.
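For instance, if the inspected table also contained a plain integer column that is really a foreign key (say a hypothetical `user_id` referencing a `user` table), `inspectdb` would emit it as an `IntField`, and you would rewrite that field by hand. A minimal sketch, assuming a `models.User` model already exists (the `user_id` column and `User` model are illustrative, not part of the table above):

```python
from tortoise import Model, fields


class Test(Model):
    id = fields.IntField(primary_key=True)
    # inspectdb would generate something like `user_id = fields.IntField()`;
    # rewritten manually as a relation to an existing model (hypothetical example):
    user = fields.ForeignKeyField("models.User", related_name="tests")
```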
254 | 255 | ### Multiple databases 256 | 257 | ```python 258 | tortoise_orm = { 259 | "connections": { 260 | "default": "postgres://postgres_user:postgres_pass@127.0.0.1:5432/db1", 261 | "second": "postgres://postgres_user:postgres_pass@127.0.0.1:5432/db2", 262 | }, 263 | "apps": { 264 | "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"}, 265 | "models_second": {"models": ["tests.models_second"], "default_connection": "second", }, 266 | }, 267 | } 268 | ``` 269 | 270 | You only need to specify `aerich.models` in one app, and you must specify `--app` when running `aerich migrate` and so on, e.g. `aerich --app models_second migrate`. 271 | 272 | ## Restore `aerich` workflow 273 | 274 | In some cases, such as breaking changes after an upgrade of `aerich`, you can't run `aerich migrate` or `aerich upgrade`. In that case, you 275 | can take the following steps: 276 | 277 | 1. drop the `aerich` table. 278 | 2. delete the `migrations/{app}` directory. 279 | 3. rerun `aerich init-db`. 280 | 281 | Note that these actions are safe; you can also use them to reset your migrations if you have too many migration files. 282 | 283 | ## Use `aerich` in application 284 | 285 | You can use `aerich` outside the CLI by using the `Command` class. 286 | 287 | ```python 288 | from aerich import Command 289 | from aerich.utils import load_tortoise_config 290 | 291 | async with Command(tortoise_config=load_tortoise_config(), app='models') as command: 292 | await command.migrate('test') 293 | await command.upgrade() 294 | print(await command.history()) 295 | ``` 296 | 297 | ## Upgrade/Downgrade with `--fake` option 298 | 299 | This marks the migrations up to the latest one (or back to the target one) as applied, without actually running the SQL to change your database schema. 300 | 301 | - Upgrade 302 | 303 | ```bash 304 | aerich upgrade --fake 305 | aerich --app models upgrade --fake 306 | ``` 307 | - Downgrade 308 | 309 | ```bash 310 | aerich downgrade --fake -v 2 311 | aerich --app models downgrade --fake -v 2 312 | ``` 313 | 314 | ### Ignore tables 315 | 316 | You can tell aerich to ignore a table by setting `managed=False` in the `Meta` class, e.g.: 317 | ```py 318 | class MyModel(Model): 319 | class Meta: 320 | managed = False 321 | ``` 322 | **Note** `managed=False` is not recognized by `tortoise-orm` or `aerich init-db`; it only affects `aerich migrate`. 323 | 324 | ## License 325 | 326 | This project is licensed under the 327 | [Apache-2.0](https://github.com/long2ice/aerich/blob/master/LICENSE) License. 328 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity.
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2020 long2ice 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /aerich/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pkgutil 4 | import warnings 5 | from collections.abc import Generator 6 | from contextlib import AbstractAsyncContextManager 7 | from pathlib import Path 8 | from typing import TYPE_CHECKING, Any, Literal, overload 9 | 10 | import asyncclick as click 11 | from tortoise import BaseDBAsyncClient, Tortoise, connections 12 | from tortoise.exceptions import OperationalError 13 | from tortoise.transactions import in_transaction 14 | from tortoise.utils import generate_schema_for_client, get_schema_sql 15 | 16 | from aerich._compat import _init_asyncio_patch, _init_tortoise_0_24_1_patch 17 | from aerich.exceptions import DowngradeError, NotInitedError 18 | from aerich.inspectdb.mysql import InspectMySQL 19 | from aerich.inspectdb.postgres import InspectPostgres 20 | from aerich.inspectdb.sqlite import InspectSQLite 21 | from aerich.migrate import Migrate 22 | from aerich.models import Aerich 23 | from aerich.utils import ( 24 | decompress_dict, 25 | file_module_info, 26 | get_app_connection, 27 | get_app_connection_name, 28 | get_models_describe, 29 | import_py_file, 30 | import_py_module, 31 | load_tortoise_config, 32 | py_module_path, 33 | ) 34 | from aerich.version import __version__ 35 | 36 | if TYPE_CHECKING: 37 | from aerich._compat import Self 38 | from aerich.inspectdb import Inspect 39 | 40 | 41 | _init_asyncio_patch() # Change event_loop_policy for Windows 42 | _init_tortoise_0_24_1_patch() # Patch m2m table generator for tortoise-orm==0.24.1 43 | __all__ = ("Command", "TortoiseContext", "__version__") 44 | 45 | 46 | class TortoiseContext(AbstractAsyncContextManager): 47 | def __init__(self, tortoise_config: dict | None = None) -> None: 48 | if tortoise_config is None: 49 | tortoise_config = load_tortoise_config() 50 | self.tortoise_config = tortoise_config 51 | self._init_when_aenter = True 52 | 53 | async def init(self) -> None: 54 | await Tortoise.init(config=self.tortoise_config) 55 | 56 | async def __aenter__(self) -> Self: 57 | if self._init_when_aenter: 58 | await self.init() 59 | return self 60 | 61 | def __await__(self) -> Generator[Any, None, Self]: 62 | # To support `command = await Command(tortoise_config)` 63 | async def _self() -> Self: 64 | return await self.__aenter__() 65 | 66 | return _self().__await__() 67 | 68 | @staticmethod 69 | async def aclose() -> None: 70 | """Close tortoise connections if it was inited""" 71 | if Tortoise._inited: 72 | await connections.close_all() 73 | 74 | async def __aexit__(self, *args, **kw) -> None: 75 | await self.aclose() 76 | 77 | 78 | class Command(TortoiseContext): 79 | def __init__( 80 | self, 81 | tortoise_config: dict, 82 | app: str = "models", 83 | location: str = "./migrations", 84 | inspectdb_fields: dict[str, str] | None = None, 85 | ) -> None: 86 | super().__init__(tortoise_config) 87 | self.app = app 88 | self.location = location 89 | 
self._inspectdb_fields = inspectdb_fields 90 | Migrate.app = app 91 | 92 | async def init(self, offline: bool = False) -> None: 93 | await Migrate.init(self.tortoise_config, self.app, self.location, offline=offline) 94 | 95 | async def close(self) -> None: 96 | warnings.warn( 97 | "`Command.close()` is deprecated, please use Command.aclose() instead", 98 | DeprecationWarning, 99 | stacklevel=2, 100 | ) 101 | await self.aclose() 102 | 103 | async def _upgrade( 104 | self, 105 | conn: BaseDBAsyncClient, 106 | version_file: str, 107 | fake: bool = False, 108 | version_module: pkgutil.ModuleInfo | None = None, 109 | ) -> None: 110 | if version_module is not None: 111 | m = import_py_module(version_module) 112 | else: 113 | m = import_py_file(Path(Migrate.migrate_location, version_file)) 114 | upgrade = m.upgrade 115 | if not fake: 116 | await conn.execute_script(await upgrade(conn)) 117 | 118 | model_state_str = getattr(m, "MODELS_STATE", None) 119 | models_state = ( 120 | decompress_dict(model_state_str) if model_state_str else get_models_describe(self.app) 121 | ) 122 | await Aerich.create(version=version_file, app=self.app, content=models_state) 123 | 124 | async def upgrade(self, run_in_transaction: bool = True, fake: bool = False) -> list[str]: 125 | migrated = [] 126 | for version_module in Migrate.get_all_version_modules(): 127 | version_file = version_module.name + ".py" 128 | try: 129 | exists = await Aerich.exists(version=version_file, app=self.app) 130 | except OperationalError: 131 | exists = False 132 | if not exists: 133 | app_conn_name = get_app_connection_name(self.tortoise_config, self.app) 134 | m = import_py_module(version_module) 135 | migration_run_in_transaction = getattr(m, "RUN_IN_TRANSACTION", run_in_transaction) 136 | if migration_run_in_transaction: 137 | async with in_transaction(app_conn_name) as conn: 138 | await self._upgrade(conn, version_file, fake, version_module) 139 | else: 140 | app_conn = get_app_connection(self.tortoise_config, self.app) 141 | await self._upgrade(app_conn, version_file, fake, version_module) 142 | migrated.append(version_file) 143 | return migrated 144 | 145 | async def downgrade(self, version: int, delete: bool, fake: bool = False) -> list[str]: 146 | ret: list[str] = [] 147 | if version == -1: 148 | specified_version = await Migrate.get_last_version() 149 | else: 150 | specified_version = await Aerich.filter( 151 | app=self.app, version__startswith=f"{version}_" 152 | ).first() 153 | if not specified_version: 154 | raise DowngradeError("No specified version found") 155 | if version == -1: 156 | versions = [specified_version] 157 | else: 158 | versions = await Aerich.filter(app=self.app, pk__gte=specified_version.pk) 159 | for version_obj in versions: 160 | file = version_obj.version 161 | async with in_transaction( 162 | get_app_connection_name(self.tortoise_config, self.app) 163 | ) as conn: 164 | module_info = file_module_info(Migrate.migrate_location, Path(file).stem) 165 | m = import_py_module(module_info) 166 | downgrade = m.downgrade 167 | downgrade_sql = await downgrade(conn) 168 | if not downgrade_sql.strip(): 169 | raise DowngradeError("No downgrade items found") 170 | if not fake: 171 | await conn.execute_script(downgrade_sql) 172 | await version_obj.delete() 173 | if delete: 174 | py_module_path(module_info).unlink() 175 | ret.append(file) 176 | return ret 177 | 178 | async def heads(self) -> list[str]: 179 | ret = [] 180 | versions = Migrate.get_all_version_files() 181 | for version in versions: 182 | if not await 
Aerich.exists(version=version, app=self.app): 183 | ret.append(version) 184 | return ret 185 | 186 | async def history(self) -> list[str]: 187 | versions = Migrate.get_all_version_files() 188 | return [version for version in versions] 189 | 190 | async def inspectdb(self, tables: list[str] | None = None) -> str: 191 | connection = get_app_connection(self.tortoise_config, self.app) 192 | dialect = connection.schema_generator.DIALECT 193 | if dialect == "mysql": 194 | cls: type[Inspect] = InspectMySQL 195 | elif dialect == "postgres": 196 | cls = InspectPostgres 197 | elif dialect == "sqlite": 198 | cls = InspectSQLite 199 | else: 200 | raise NotImplementedError(f"{dialect} is not supported") 201 | inspect = cls(connection, tables) 202 | if self._inspectdb_fields: 203 | inspect._special_fields = self._inspectdb_fields 204 | return await inspect.inspect() 205 | 206 | @overload 207 | async def migrate( 208 | self, 209 | name: str = "update", 210 | empty: bool = False, 211 | no_input: Literal[True] = True, 212 | offline: bool = False, 213 | ) -> str: ... 214 | 215 | @overload 216 | async def migrate( 217 | self, 218 | name: str = "update", 219 | empty: bool = False, 220 | no_input: bool = False, 221 | offline: bool = False, 222 | ) -> str | None: ... 223 | 224 | async def migrate( 225 | self, 226 | name: str = "update", 227 | empty: bool = False, 228 | no_input: bool = False, 229 | offline: bool = False, 230 | ) -> str | None: 231 | # return None if same version migration file already exists, and new one not generated 232 | try: 233 | return await Migrate.migrate(name, empty, no_input, offline) 234 | except NotInitedError as e: 235 | raise NotInitedError("You have to call .init() first before migrate") from e 236 | 237 | async def init_db(self, safe: bool, pre_sql: str | None = None) -> None: 238 | await self._do_init(safe, pre_sql) 239 | 240 | async def _do_init(self, safe: bool, pre_sql: str | None = None, offline: bool = False) -> None: 241 | location = self.location 242 | app = self.app 243 | config = self.tortoise_config 244 | 245 | await Tortoise.init(config=config) 246 | connection = get_app_connection(config, app) 247 | if offline: 248 | await Migrate.init(config, app, location, offline=True) 249 | elif pre_sql: 250 | await connection.execute_script(pre_sql) 251 | 252 | dirname = Migrate.get_migration_dir(location, app) 253 | if not dirname.exists(): 254 | dirname.mkdir(parents=True) 255 | else: 256 | # If directory is empty, go ahead, otherwise raise FileExistsError 257 | for unexpected_file in dirname.glob("*"): 258 | raise FileExistsError(str(unexpected_file)) 259 | schema = get_schema_sql(connection, safe) 260 | 261 | version = await Migrate.generate_version(offline=offline) 262 | aerich_content = get_models_describe(app) 263 | version_file = Path(dirname, version) 264 | content = Migrate.build_migration_file_text(upgrade_sql=schema, models_state=aerich_content) 265 | version_file.write_text(content, encoding="utf-8") 266 | Migrate._last_version_content = aerich_content 267 | if not offline: 268 | await generate_schema_for_client(connection, safe) 269 | await Aerich.create(version=version, app=app, content=aerich_content) 270 | 271 | async def init_migrations(self, safe: bool) -> None: 272 | await self._do_init(safe, offline=True) 273 | 274 | async def fix_migrations(self) -> list[str] | None: 275 | """ 276 | Fix migration files to include models state for aerich 0.6.0+ 277 | :return: List of updated migration files (if no migration file or no aerich objects will return None) 278 | 
""" 279 | Migrate.app = self.app 280 | Migrate.migrate_location = Migrate.get_migration_dir(self.location, self.app) 281 | return await Migrate.fix_migrations(self.tortoise_config) 282 | 283 | @staticmethod 284 | async def get_applied_migrations(self, app: str | None = None) -> list[str]: 285 | """ 286 | Get applied migrations by query the 'aerich' table 287 | 288 | :param app: if None, query all app 289 | :return: List of migration files 290 | """ 291 | qs = Aerich.all() 292 | if app is not None: 293 | qs = qs.filter(app=app) 294 | return await qs.values_list("version", flat=True) # type:ignore[return-value] 295 | 296 | @classmethod 297 | def list_applied(cls, app: str | None = None) -> None: 298 | @click.command 299 | async def display() -> None: 300 | async with TortoiseContext(): 301 | applied = await cls.get_applied_migrations(app) 302 | if applied: 303 | click.echo(click.style("Applied Migrations:", bold=True)) 304 | for migration_file in applied: 305 | click.echo(migration_file) 306 | else: 307 | click.echo("No applied migration for " + ("all apps" if app is None else f"{app=}")) 308 | 309 | display() 310 | -------------------------------------------------------------------------------- /tests/assets/sqlite_old_style/data.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "version": "0_20250405163033_init.py", 4 | "content": { 5 | "models.Foo": { 6 | "name": "models.Foo", 7 | "app": "models", 8 | "table": "foo", 9 | "abstract": false, 10 | "description": null, 11 | "docstring": null, 12 | "unique_together": [], 13 | "indexes": [], 14 | "pk_field": { 15 | "name": "id", 16 | "field_type": "IntField", 17 | "db_column": "id", 18 | "python_type": "int", 19 | "generated": true, 20 | "nullable": false, 21 | "unique": true, 22 | "indexed": true, 23 | "default": null, 24 | "description": null, 25 | "docstring": null, 26 | "constraints": { 27 | "ge": -2147483648, 28 | "le": 2147483647 29 | }, 30 | "db_field_types": { 31 | "": "INT" 32 | } 33 | }, 34 | "data_fields": [ 35 | { 36 | "name": "name", 37 | "field_type": "CharField", 38 | "db_column": "name", 39 | "python_type": "str", 40 | "generated": false, 41 | "nullable": false, 42 | "unique": false, 43 | "indexed": false, 44 | "default": null, 45 | "description": null, 46 | "docstring": null, 47 | "constraints": { 48 | "max_length": 60 49 | }, 50 | "db_field_types": { 51 | "": "VARCHAR(60)", 52 | "oracle": "NVARCHAR2(60)" 53 | } 54 | } 55 | ], 56 | "fk_fields": [], 57 | "backward_fk_fields": [], 58 | "o2o_fields": [], 59 | "backward_o2o_fields": [], 60 | "m2m_fields": [], 61 | "managed": null 62 | }, 63 | "models.Aerich": { 64 | "name": "models.Aerich", 65 | "app": "models", 66 | "table": "aerich", 67 | "abstract": false, 68 | "description": null, 69 | "docstring": null, 70 | "unique_together": [], 71 | "indexes": [], 72 | "pk_field": { 73 | "name": "id", 74 | "field_type": "IntField", 75 | "db_column": "id", 76 | "python_type": "int", 77 | "generated": true, 78 | "nullable": false, 79 | "unique": true, 80 | "indexed": true, 81 | "default": null, 82 | "description": null, 83 | "docstring": null, 84 | "constraints": { 85 | "ge": -2147483648, 86 | "le": 2147483647 87 | }, 88 | "db_field_types": { 89 | "": "INT" 90 | } 91 | }, 92 | "data_fields": [ 93 | { 94 | "name": "version", 95 | "field_type": "CharField", 96 | "db_column": "version", 97 | "python_type": "str", 98 | "generated": false, 99 | "nullable": false, 100 | "unique": false, 101 | "indexed": false, 102 | "default": null, 103 | 
"description": null, 104 | "docstring": null, 105 | "constraints": { 106 | "max_length": 255 107 | }, 108 | "db_field_types": { 109 | "": "VARCHAR(255)", 110 | "oracle": "NVARCHAR2(255)" 111 | } 112 | }, 113 | { 114 | "name": "app", 115 | "field_type": "CharField", 116 | "db_column": "app", 117 | "python_type": "str", 118 | "generated": false, 119 | "nullable": false, 120 | "unique": false, 121 | "indexed": false, 122 | "default": null, 123 | "description": null, 124 | "docstring": null, 125 | "constraints": { 126 | "max_length": 100 127 | }, 128 | "db_field_types": { 129 | "": "VARCHAR(100)", 130 | "oracle": "NVARCHAR2(100)" 131 | } 132 | }, 133 | { 134 | "name": "content", 135 | "field_type": "JSONField", 136 | "db_column": "content", 137 | "python_type": "Union[dict, list]", 138 | "generated": false, 139 | "nullable": false, 140 | "unique": false, 141 | "indexed": false, 142 | "default": null, 143 | "description": null, 144 | "docstring": null, 145 | "constraints": {}, 146 | "db_field_types": { 147 | "": "JSON", 148 | "mssql": "NVARCHAR(MAX)", 149 | "oracle": "NCLOB", 150 | "postgres": "JSONB" 151 | } 152 | } 153 | ], 154 | "fk_fields": [], 155 | "backward_fk_fields": [], 156 | "o2o_fields": [], 157 | "backward_o2o_fields": [], 158 | "m2m_fields": [], 159 | "managed": null 160 | } 161 | }, 162 | "app": "models" 163 | }, 164 | { 165 | "version": "1_20250405163135_update.py", 166 | "content": { 167 | "models.Foo": { 168 | "name": "models.Foo", 169 | "app": "models", 170 | "table": "foo", 171 | "abstract": false, 172 | "description": null, 173 | "docstring": null, 174 | "unique_together": [], 175 | "indexes": [], 176 | "pk_field": { 177 | "name": "id", 178 | "field_type": "IntField", 179 | "db_column": "id", 180 | "python_type": "int", 181 | "generated": true, 182 | "nullable": false, 183 | "unique": true, 184 | "indexed": true, 185 | "default": null, 186 | "description": null, 187 | "docstring": null, 188 | "constraints": { 189 | "ge": -2147483648, 190 | "le": 2147483647 191 | }, 192 | "db_field_types": { 193 | "": "INT" 194 | } 195 | }, 196 | "data_fields": [ 197 | { 198 | "name": "name", 199 | "field_type": "CharField", 200 | "db_column": "name", 201 | "python_type": "str", 202 | "generated": false, 203 | "nullable": false, 204 | "unique": false, 205 | "indexed": true, 206 | "default": null, 207 | "description": null, 208 | "docstring": null, 209 | "constraints": { 210 | "max_length": 60 211 | }, 212 | "db_field_types": { 213 | "": "VARCHAR(60)", 214 | "oracle": "NVARCHAR2(60)" 215 | } 216 | } 217 | ], 218 | "fk_fields": [], 219 | "backward_fk_fields": [], 220 | "o2o_fields": [], 221 | "backward_o2o_fields": [], 222 | "m2m_fields": [], 223 | "managed": null 224 | }, 225 | "models.Aerich": { 226 | "name": "models.Aerich", 227 | "app": "models", 228 | "table": "aerich", 229 | "abstract": false, 230 | "description": null, 231 | "docstring": null, 232 | "unique_together": [], 233 | "indexes": [], 234 | "pk_field": { 235 | "name": "id", 236 | "field_type": "IntField", 237 | "db_column": "id", 238 | "python_type": "int", 239 | "generated": true, 240 | "nullable": false, 241 | "unique": true, 242 | "indexed": true, 243 | "default": null, 244 | "description": null, 245 | "docstring": null, 246 | "constraints": { 247 | "ge": -2147483648, 248 | "le": 2147483647 249 | }, 250 | "db_field_types": { 251 | "": "INT" 252 | } 253 | }, 254 | "data_fields": [ 255 | { 256 | "name": "version", 257 | "field_type": "CharField", 258 | "db_column": "version", 259 | "python_type": "str", 260 | "generated": 
false, 261 | "nullable": false, 262 | "unique": false, 263 | "indexed": false, 264 | "default": null, 265 | "description": null, 266 | "docstring": null, 267 | "constraints": { 268 | "max_length": 255 269 | }, 270 | "db_field_types": { 271 | "": "VARCHAR(255)", 272 | "oracle": "NVARCHAR2(255)" 273 | } 274 | }, 275 | { 276 | "name": "app", 277 | "field_type": "CharField", 278 | "db_column": "app", 279 | "python_type": "str", 280 | "generated": false, 281 | "nullable": false, 282 | "unique": false, 283 | "indexed": false, 284 | "default": null, 285 | "description": null, 286 | "docstring": null, 287 | "constraints": { 288 | "max_length": 100 289 | }, 290 | "db_field_types": { 291 | "": "VARCHAR(100)", 292 | "oracle": "NVARCHAR2(100)" 293 | } 294 | }, 295 | { 296 | "name": "content", 297 | "field_type": "JSONField", 298 | "db_column": "content", 299 | "python_type": "Union[dict, list]", 300 | "generated": false, 301 | "nullable": false, 302 | "unique": false, 303 | "indexed": false, 304 | "default": null, 305 | "description": null, 306 | "docstring": null, 307 | "constraints": {}, 308 | "db_field_types": { 309 | "": "JSON", 310 | "mssql": "NVARCHAR(MAX)", 311 | "oracle": "NCLOB", 312 | "postgres": "JSONB" 313 | } 314 | } 315 | ], 316 | "fk_fields": [], 317 | "backward_fk_fields": [], 318 | "o2o_fields": [], 319 | "backward_o2o_fields": [], 320 | "m2m_fields": [], 321 | "managed": null 322 | } 323 | }, 324 | "app": "models" 325 | } 326 | ] --------------------------------------------------------------------------------