├── tests
│   ├── __init__.py
│   ├── test_utils.py
│   ├── models_second.py
│   ├── old_models.py
│   ├── models.py
│   ├── test_ddl.py
│   └── test_migrate.py
├── aerich
│   ├── version.py
│   ├── enums.py
│   ├── exceptions.py
│   ├── models.py
│   ├── coder.py
│   ├── ddl
│   │   ├── sqlite
│   │   │   └── __init__.py
│   │   ├── mysql
│   │   │   └── __init__.py
│   │   ├── postgres
│   │   │   └── __init__.py
│   │   └── __init__.py
│   ├── inspectdb
│   │   ├── sqlite.py
│   │   ├── mysql.py
│   │   ├── postgres.py
│   │   └── __init__.py
│   ├── utils.py
│   ├── __init__.py
│   ├── cli.py
│   └── migrate.py
├── .github
│   ├── FUNDING.yml
│   └── workflows
│       ├── pypi.yml
│       └── ci.yml
├── Makefile
├── pyproject.toml
├── conftest.py
├── .gitignore
├── CHANGELOG.md
├── README.md
├── README_RU.md
└── LICENSE
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/aerich/version.py:
--------------------------------------------------------------------------------
1 | __version__ = "0.7.2"
2 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | custom: ["https://sponsor.long2ice.io"]
2 |
--------------------------------------------------------------------------------
/aerich/enums.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 |
3 |
4 | class Color(str, Enum):
5 |     green = "green"
6 |     red = "red"
7 |     yellow = "yellow"
8 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | from aerich.utils import import_py_file
2 |
3 |
4 | def test_import_py_file():
5 |     m = import_py_file("aerich/utils.py")
6 |     assert getattr(m, "import_py_file")
7 |
--------------------------------------------------------------------------------
/aerich/exceptions.py:
--------------------------------------------------------------------------------
1 | class NotSupportError(Exception):
2 |     """
3 |     Raised when a feature is not supported.
4 |     """
5 |
6 |
7 | class DowngradeError(Exception):
8 |     """
9 |     Raised when a downgrade fails.
10 |     """
11 |
--------------------------------------------------------------------------------
/aerich/models.py:
--------------------------------------------------------------------------------
1 | from tortoise import Model, fields
2 |
3 | from aerich.coder import decoder, encoder
4 |
5 | MAX_VERSION_LENGTH = 255
6 | MAX_APP_LENGTH = 100
7 |
8 |
9 | class Aerich(Model):
10 |     version = fields.CharField(max_length=MAX_VERSION_LENGTH)
11 |     app = fields.CharField(max_length=MAX_APP_LENGTH)
12 |     content = fields.JSONField(encoder=encoder, decoder=decoder)
13 |
14 |     class Meta:
15 |         ordering = ["-id"]
16 |
--------------------------------------------------------------------------------
/.github/workflows/pypi.yml:
--------------------------------------------------------------------------------
1 | name: pypi
2 | on:
3 |   release:
4 |     types:
5 |       - created
6 | jobs:
7 |   publish:
8 |     runs-on: ubuntu-latest
9 |     steps:
10 |       - uses: actions/checkout@v2
11 |       - uses: actions/setup-python@v2
12 |         with:
13 |           python-version: '3.x'
14 |       - name: Install and configure Poetry
15 |         run: |
16 |           pip install -U pip poetry
17 |           poetry config virtualenvs.create false
18 |       - name: Build dists
19 |         run: make build
20 |       - name: Pypi Publish
21 |         uses: pypa/gh-action-pypi-publish@master
22 |         with:
23 |           user: __token__
24 |
password: ${{ secrets.pypi_password }} 25 | -------------------------------------------------------------------------------- /aerich/coder.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import json 3 | import pickle # nosec: B301,B403 4 | 5 | from tortoise.indexes import Index 6 | 7 | 8 | class JsonEncoder(json.JSONEncoder): 9 | def default(self, obj): 10 | if isinstance(obj, Index): 11 | return { 12 | "type": "index", 13 | "val": base64.b64encode(pickle.dumps(obj)).decode(), # nosec: B301 14 | } 15 | else: 16 | return super().default(obj) 17 | 18 | 19 | def object_hook(obj): 20 | _type = obj.get("type") 21 | if not _type: 22 | return obj 23 | return pickle.loads(base64.b64decode(obj["val"])) # nosec: B301 24 | 25 | 26 | def encoder(obj: dict): 27 | return json.dumps(obj, cls=JsonEncoder) 28 | 29 | 30 | def decoder(obj: str): 31 | return json.loads(obj, object_hook=object_hook) 32 | -------------------------------------------------------------------------------- /aerich/ddl/sqlite/__init__.py: -------------------------------------------------------------------------------- 1 | from typing import Type 2 | 3 | from tortoise import Model 4 | from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator 5 | 6 | from aerich.ddl import BaseDDL 7 | from aerich.exceptions import NotSupportError 8 | 9 | 10 | class SqliteDDL(BaseDDL): 11 | schema_generator_cls = SqliteSchemaGenerator 12 | DIALECT = SqliteSchemaGenerator.DIALECT 13 | 14 | def modify_column(self, model: "Type[Model]", field_object: dict, is_pk: bool = True): 15 | raise NotSupportError("Modify column is unsupported in SQLite.") 16 | 17 | def alter_column_default(self, model: "Type[Model]", field_describe: dict): 18 | raise NotSupportError("Alter column default is unsupported in SQLite.") 19 | 20 | def alter_column_null(self, model: "Type[Model]", field_describe: dict): 21 | raise NotSupportError("Alter column null is unsupported in SQLite.") 22 | 23 | def set_comment(self, model: "Type[Model]", field_describe: dict): 24 | raise NotSupportError("Alter column comment is unsupported in SQLite.") 25 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | checkfiles = aerich/ tests/ conftest.py 2 | black_opts = -l 100 -t py38 3 | py_warn = PYTHONDEVMODE=1 4 | MYSQL_HOST ?= "127.0.0.1" 5 | MYSQL_PORT ?= 3306 6 | MYSQL_PASS ?= "123456" 7 | POSTGRES_HOST ?= "127.0.0.1" 8 | POSTGRES_PORT ?= 5432 9 | POSTGRES_PASS ?= "123456" 10 | 11 | up: 12 | @poetry update 13 | 14 | deps: 15 | @poetry install -E asyncpg -E asyncmy 16 | 17 | style: deps 18 | @isort -src $(checkfiles) 19 | @black $(black_opts) $(checkfiles) 20 | 21 | check: deps 22 | @black --check $(black_opts) $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false) 23 | @ruff $(checkfiles) 24 | 25 | test: deps 26 | $(py_warn) TEST_DB=sqlite://:memory: py.test 27 | 28 | test_sqlite: 29 | $(py_warn) TEST_DB=sqlite://:memory: py.test 30 | 31 | test_mysql: 32 | $(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" pytest -vv -s 33 | 34 | test_postgres: 35 | $(py_warn) TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest -vv -s 36 | 37 | testall: deps test_sqlite test_postgres test_mysql 38 | 39 | build: deps 40 | @poetry build 41 | 42 | ci: check testall 43 | 
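An aside on `aerich/coder.py` above: the custom encoder/decoder pair exists so that `tortoise.indexes.Index` objects can live inside the JSON `content` column of the `Aerich` model. A minimal sketch of the round-trip (it assumes only that `tortoise-orm` is installed):

```python
from tortoise.indexes import Index

from aerich.coder import decoder, encoder

# Index instances are pickled and base64-encoded by JsonEncoder.default(),
# then rebuilt by object_hook() on the way back out.
payload = {"indexes": [Index(fields=("name", "type"))]}
raw = encoder(payload)   # JSON string; the Index becomes {"type": "index", "val": "..."}
restored = decoder(raw)  # object_hook unpickles it back into an Index
assert isinstance(restored["indexes"][0], Index)
```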
-------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | on: 3 | push: 4 | branches-ignore: 5 | - main 6 | pull_request: 7 | branches-ignore: 8 | - main 9 | jobs: 10 | ci: 11 | runs-on: ubuntu-latest 12 | services: 13 | postgres: 14 | image: postgres:latest 15 | ports: 16 | - 5432:5432 17 | env: 18 | POSTGRES_PASSWORD: 123456 19 | POSTGRES_USER: postgres 20 | options: --health-cmd=pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 21 | steps: 22 | - name: Start MySQL 23 | run: sudo systemctl start mysql.service 24 | - uses: actions/checkout@v2 25 | - uses: actions/setup-python@v2 26 | with: 27 | python-version: '3.x' 28 | - name: Install and configure Poetry 29 | run: | 30 | pip install -U pip poetry 31 | poetry config virtualenvs.create false 32 | - name: CI 33 | env: 34 | MYSQL_PASS: root 35 | MYSQL_HOST: 127.0.0.1 36 | MYSQL_PORT: 3306 37 | POSTGRES_PASS: 123456 38 | POSTGRES_HOST: 127.0.0.1 39 | POSTGRES_PORT: 5432 40 | run: make ci 41 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "aerich" 3 | version = "0.7.2" 4 | description = "A database migrations tool for Tortoise ORM." 5 | authors = ["long2ice "] 6 | license = "Apache-2.0" 7 | readme = "README.md" 8 | homepage = "https://github.com/tortoise/aerich" 9 | repository = "https://github.com/tortoise/aerich.git" 10 | documentation = "https://github.com/tortoise/aerich" 11 | keywords = ["migrate", "Tortoise-ORM", "mysql"] 12 | packages = [ 13 | { include = "aerich" } 14 | ] 15 | include = ["CHANGELOG.md", "LICENSE", "README.md"] 16 | 17 | [tool.poetry.dependencies] 18 | python = "^3.7" 19 | tortoise-orm = "*" 20 | click = "*" 21 | asyncpg = { version = "*", optional = true } 22 | asyncmy = { version = "^0.2.8rc1", optional = true, allow-prereleases = true } 23 | pydantic = "*" 24 | dictdiffer = "*" 25 | tomlkit = "*" 26 | 27 | [tool.poetry.dev-dependencies] 28 | ruff = "*" 29 | isort = "*" 30 | black = "*" 31 | pytest = "*" 32 | pytest-xdist = "*" 33 | pytest-asyncio = "*" 34 | bandit = "*" 35 | pytest-mock = "*" 36 | cryptography = "*" 37 | 38 | [tool.poetry.extras] 39 | asyncmy = ["asyncmy"] 40 | asyncpg = ["asyncpg"] 41 | 42 | [tool.aerich] 43 | tortoise_orm = "conftest.tortoise_orm" 44 | location = "./migrations" 45 | src_folder = "./." 
46 | 47 | [build-system] 48 | requires = ["poetry>=0.12"] 49 | build-backend = "poetry.masonry.api" 50 | 51 | [tool.poetry.scripts] 52 | aerich = "aerich.cli:main" 53 | 54 | [tool.black] 55 | line-length = 100 56 | target-version = ['py36', 'py37', 'py38', 'py39'] 57 | 58 | [tool.pytest.ini_options] 59 | asyncio_mode = 'auto' 60 | 61 | [tool.mypy] 62 | pretty = true 63 | ignore_missing_imports = true 64 | 65 | [tool.ruff] 66 | ignore = ['E501'] 67 | -------------------------------------------------------------------------------- /aerich/ddl/mysql/__init__.py: -------------------------------------------------------------------------------- 1 | from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator 2 | 3 | from aerich.ddl import BaseDDL 4 | 5 | 6 | class MysqlDDL(BaseDDL): 7 | schema_generator_cls = MySQLSchemaGenerator 8 | DIALECT = MySQLSchemaGenerator.DIALECT 9 | _DROP_TABLE_TEMPLATE = "DROP TABLE IF EXISTS `{table_name}`" 10 | _ADD_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` ADD {column}" 11 | _ALTER_DEFAULT_TEMPLATE = "ALTER TABLE `{table_name}` ALTER COLUMN `{column}` {default}" 12 | _CHANGE_COLUMN_TEMPLATE = ( 13 | "ALTER TABLE `{table_name}` CHANGE {old_column_name} {new_column_name} {new_column_type}" 14 | ) 15 | _DROP_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` DROP COLUMN `{column_name}`" 16 | _RENAME_COLUMN_TEMPLATE = ( 17 | "ALTER TABLE `{table_name}` RENAME COLUMN `{old_column_name}` TO `{new_column_name}`" 18 | ) 19 | _ADD_INDEX_TEMPLATE = ( 20 | "ALTER TABLE `{table_name}` ADD {unique}INDEX `{index_name}` ({column_names})" 21 | ) 22 | _DROP_INDEX_TEMPLATE = "ALTER TABLE `{table_name}` DROP INDEX `{index_name}`" 23 | _ADD_FK_TEMPLATE = "ALTER TABLE `{table_name}` ADD CONSTRAINT `{fk_name}` FOREIGN KEY (`{db_column}`) REFERENCES `{table}` (`{field}`) ON DELETE {on_delete}" 24 | _DROP_FK_TEMPLATE = "ALTER TABLE `{table_name}` DROP FOREIGN KEY `{fk_name}`" 25 | _M2M_TABLE_TEMPLATE = ( 26 | "CREATE TABLE `{table_name}` (\n" 27 | " `{backward_key}` {backward_type} NOT NULL REFERENCES `{backward_table}` (`{backward_field}`) ON DELETE CASCADE,\n" 28 | " `{forward_key}` {forward_type} NOT NULL REFERENCES `{forward_table}` (`{forward_field}`) ON DELETE CASCADE\n" 29 | "){extra}{comment}" 30 | ) 31 | _MODIFY_COLUMN_TEMPLATE = "ALTER TABLE `{table_name}` MODIFY COLUMN {column}" 32 | _RENAME_TABLE_TEMPLATE = "ALTER TABLE `{old_table_name}` RENAME TO `{new_table_name}`" 33 | -------------------------------------------------------------------------------- /tests/models_second.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from enum import IntEnum 3 | 4 | from tortoise import Model, fields 5 | 6 | 7 | class ProductType(IntEnum): 8 | article = 1 9 | page = 2 10 | 11 | 12 | class PermissionAction(IntEnum): 13 | create = 1 14 | delete = 2 15 | update = 3 16 | read = 4 17 | 18 | 19 | class Status(IntEnum): 20 | on = 1 21 | off = 0 22 | 23 | 24 | class User(Model): 25 | username = fields.CharField(max_length=20, unique=True) 26 | password = fields.CharField(max_length=200) 27 | last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now) 28 | is_active = fields.BooleanField(default=True, description="Is Active") 29 | is_superuser = fields.BooleanField(default=False, description="Is SuperUser") 30 | avatar = fields.CharField(max_length=200, default="") 31 | intro = fields.TextField(default="") 32 | 33 | 34 | class Email(Model): 35 | email = fields.CharField(max_length=200) 36 | 
is_primary = fields.BooleanField(default=False) 37 | user = fields.ForeignKeyField("models_second.User", db_constraint=False) 38 | 39 | 40 | class Category(Model): 41 | slug = fields.CharField(max_length=200) 42 | name = fields.CharField(max_length=200) 43 | user = fields.ForeignKeyField("models_second.User", description="User") 44 | created_at = fields.DatetimeField(auto_now_add=True) 45 | 46 | 47 | class Product(Model): 48 | categories = fields.ManyToManyField("models_second.Category") 49 | name = fields.CharField(max_length=50) 50 | view_num = fields.IntField(description="View Num") 51 | sort = fields.IntField() 52 | is_reviewed = fields.BooleanField(description="Is Reviewed") 53 | type = fields.IntEnumField( 54 | ProductType, description="Product Type", source_field="type_db_alias" 55 | ) 56 | image = fields.CharField(max_length=200) 57 | body = fields.TextField() 58 | created_at = fields.DatetimeField(auto_now_add=True) 59 | 60 | 61 | class Config(Model): 62 | label = fields.CharField(max_length=200) 63 | key = fields.CharField(max_length=20) 64 | value = fields.JSONField() 65 | status: Status = fields.IntEnumField(Status, default=Status.on) 66 | -------------------------------------------------------------------------------- /tests/old_models.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from enum import IntEnum 3 | 4 | from tortoise import Model, fields 5 | 6 | 7 | class ProductType(IntEnum): 8 | article = 1 9 | page = 2 10 | 11 | 12 | class PermissionAction(IntEnum): 13 | create = 1 14 | delete = 2 15 | update = 3 16 | read = 4 17 | 18 | 19 | class Status(IntEnum): 20 | on = 1 21 | off = 0 22 | 23 | 24 | class User(Model): 25 | username = fields.CharField(max_length=20) 26 | password = fields.CharField(max_length=200) 27 | last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now) 28 | is_active = fields.BooleanField(default=True, description="Is Active") 29 | is_superuser = fields.BooleanField(default=False, description="Is SuperUser") 30 | avatar = fields.CharField(max_length=200, default="") 31 | intro = fields.TextField(default="") 32 | longitude = fields.DecimalField(max_digits=12, decimal_places=9) 33 | 34 | 35 | class Email(Model): 36 | email = fields.CharField(max_length=200) 37 | is_primary = fields.BooleanField(default=False) 38 | user = fields.ForeignKeyField("models.User", db_constraint=False) 39 | 40 | 41 | class Category(Model): 42 | slug = fields.CharField(max_length=200) 43 | name = fields.CharField(max_length=200) 44 | user = fields.ForeignKeyField("models.User", description="User") 45 | created_at = fields.DatetimeField(auto_now_add=True) 46 | 47 | 48 | class Product(Model): 49 | categories = fields.ManyToManyField("models.Category") 50 | name = fields.CharField(max_length=50) 51 | view_num = fields.IntField(description="View Num") 52 | sort = fields.IntField() 53 | is_reviewed = fields.BooleanField(description="Is Reviewed") 54 | type = fields.IntEnumField( 55 | ProductType, description="Product Type", source_field="type_db_alias" 56 | ) 57 | image = fields.CharField(max_length=200) 58 | body = fields.TextField() 59 | created_at = fields.DatetimeField(auto_now_add=True) 60 | 61 | 62 | class Config(Model): 63 | label = fields.CharField(max_length=200) 64 | key = fields.CharField(max_length=20) 65 | value = fields.JSONField() 66 | status: Status = fields.IntEnumField(Status, default=Status.on) 67 | 68 | class Meta: 69 | table = "configs" 70 | 
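These "old" models are the before-image that the test suite migrates away from: the tests diff them against `tests/models.py` (for example, `username` gains `unique=True`, `password` shrinks to `max_length=100`, and `longitude` changes precision). A rough sketch of the underlying idea — tortoise's `Model.describe()` plus the `dictdiffer` dependency declared in `pyproject.toml` — might look like this; the `diff_models` app label is just a name chosen here for illustration:

```python
from dictdiffer import diff
from tortoise import Tortoise, run_async


async def show_user_diff():
    # Load the new and old model modules under two app labels so both can
    # be described side by side.
    await Tortoise.init(
        db_url="sqlite://:memory:",
        modules={"models": ["tests.models"], "diff_models": ["tests.old_models"]},
    )
    new = Tortoise.apps["models"]["User"].describe()
    old = Tortoise.apps["diff_models"]["User"].describe()
    for change in diff(old, new):
        print(change)  # ('change', <path>, (old_value, new_value)), ...
    await Tortoise.close_connections()


run_async(show_user_diff())
```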
-------------------------------------------------------------------------------- /aerich/ddl/postgres/__init__.py: -------------------------------------------------------------------------------- 1 | from typing import Type 2 | 3 | from tortoise import Model 4 | from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator 5 | 6 | from aerich.ddl import BaseDDL 7 | 8 | 9 | class PostgresDDL(BaseDDL): 10 | schema_generator_cls = AsyncpgSchemaGenerator 11 | DIALECT = AsyncpgSchemaGenerator.DIALECT 12 | _ADD_INDEX_TEMPLATE = 'CREATE {unique}INDEX "{index_name}" ON "{table_name}" ({column_names})' 13 | _DROP_INDEX_TEMPLATE = 'DROP INDEX "{index_name}"' 14 | _ALTER_NULL_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {set_drop} NOT NULL' 15 | _MODIFY_COLUMN_TEMPLATE = ( 16 | 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" TYPE {datatype}{using}' 17 | ) 18 | _SET_COMMENT_TEMPLATE = 'COMMENT ON COLUMN "{table_name}"."{column}" IS {comment}' 19 | _DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP CONSTRAINT "{fk_name}"' 20 | 21 | def alter_column_null(self, model: "Type[Model]", field_describe: dict): 22 | db_table = model._meta.db_table 23 | return self._ALTER_NULL_TEMPLATE.format( 24 | table_name=db_table, 25 | column=field_describe.get("db_column"), 26 | set_drop="DROP" if field_describe.get("nullable") else "SET", 27 | ) 28 | 29 | def modify_column(self, model: "Type[Model]", field_describe: dict, is_pk: bool = False): 30 | db_table = model._meta.db_table 31 | db_field_types = field_describe.get("db_field_types") 32 | db_column = field_describe.get("db_column") 33 | datatype = db_field_types.get(self.DIALECT) or db_field_types.get("") 34 | return self._MODIFY_COLUMN_TEMPLATE.format( 35 | table_name=db_table, 36 | column=db_column, 37 | datatype=datatype, 38 | using=f' USING "{db_column}"::{datatype}', 39 | ) 40 | 41 | def set_comment(self, model: "Type[Model]", field_describe: dict): 42 | db_table = model._meta.db_table 43 | return self._SET_COMMENT_TEMPLATE.format( 44 | table_name=db_table, 45 | column=field_describe.get("db_column") or field_describe.get("raw_field"), 46 | comment="'{}'".format(field_describe.get("description")) 47 | if field_describe.get("description") 48 | else "NULL", 49 | ) 50 | -------------------------------------------------------------------------------- /aerich/inspectdb/sqlite.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from aerich.inspectdb import Column, Inspect 4 | 5 | 6 | class InspectSQLite(Inspect): 7 | @property 8 | def field_map(self) -> dict: 9 | return { 10 | "INTEGER": self.int_field, 11 | "INT": self.bool_field, 12 | "SMALLINT": self.smallint_field, 13 | "VARCHAR": self.char_field, 14 | "TEXT": self.text_field, 15 | "TIMESTAMP": self.datetime_field, 16 | "REAL": self.float_field, 17 | "BIGINT": self.bigint_field, 18 | "DATE": self.date_field, 19 | "TIME": self.time_field, 20 | "JSON": self.json_field, 21 | "BLOB": self.binary_field, 22 | } 23 | 24 | async def get_columns(self, table: str) -> List[Column]: 25 | columns = [] 26 | sql = f"PRAGMA table_info({table})" 27 | ret = await self.conn.execute_query_dict(sql) 28 | columns_index = await self._get_columns_index(table) 29 | for row in ret: 30 | try: 31 | length = row["type"].split("(")[1].split(")")[0] 32 | except IndexError: 33 | length = None 34 | columns.append( 35 | Column( 36 | name=row["name"], 37 | data_type=row["type"].split("(")[0], 38 | null=row["notnull"] == 0, 39 | 
default=row["dflt_value"], 40 | length=length, 41 | pk=row["pk"] == 1, 42 | unique=columns_index.get(row["name"]) == "unique", 43 | index=columns_index.get(row["name"]) == "index", 44 | ) 45 | ) 46 | return columns 47 | 48 | async def _get_columns_index(self, table: str): 49 | sql = f"PRAGMA index_list ({table})" 50 | indexes = await self.conn.execute_query_dict(sql) 51 | ret = {} 52 | for index in indexes: 53 | sql = f"PRAGMA index_info({index['name']})" 54 | index_info = (await self.conn.execute_query_dict(sql))[0] 55 | ret[index_info["name"]] = "unique" if index["unique"] else "index" 56 | return ret 57 | 58 | async def get_all_tables(self) -> List[str]: 59 | sql = "select tbl_name from sqlite_master where type='table' and name!='sqlite_sequence'" 60 | ret = await self.conn.execute_query_dict(sql) 61 | return list(map(lambda x: x["tbl_name"], ret)) 62 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | 4 | import pytest 5 | from tortoise import Tortoise, expand_db_url, generate_schema_for_client 6 | from tortoise.backends.asyncpg.schema_generator import AsyncpgSchemaGenerator 7 | from tortoise.backends.mysql.schema_generator import MySQLSchemaGenerator 8 | from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator 9 | 10 | from aerich.ddl.mysql import MysqlDDL 11 | from aerich.ddl.postgres import PostgresDDL 12 | from aerich.ddl.sqlite import SqliteDDL 13 | from aerich.migrate import Migrate 14 | 15 | db_url = os.getenv("TEST_DB", "sqlite://:memory:") 16 | db_url_second = os.getenv("TEST_DB_SECOND", "sqlite://:memory:") 17 | tortoise_orm = { 18 | "connections": { 19 | "default": expand_db_url(db_url, True), 20 | "second": expand_db_url(db_url_second, True), 21 | }, 22 | "apps": { 23 | "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"}, 24 | "models_second": {"models": ["tests.models_second"], "default_connection": "second"}, 25 | }, 26 | } 27 | 28 | 29 | @pytest.fixture(scope="function", autouse=True) 30 | def reset_migrate(): 31 | Migrate.upgrade_operators = [] 32 | Migrate.downgrade_operators = [] 33 | Migrate._upgrade_fk_m2m_index_operators = [] 34 | Migrate._downgrade_fk_m2m_index_operators = [] 35 | Migrate._upgrade_m2m = [] 36 | Migrate._downgrade_m2m = [] 37 | 38 | 39 | @pytest.fixture(scope="session") 40 | def event_loop(): 41 | policy = asyncio.get_event_loop_policy() 42 | res = policy.new_event_loop() 43 | asyncio.set_event_loop(res) 44 | res._close = res.close 45 | res.close = lambda: None 46 | 47 | yield res 48 | 49 | res._close() 50 | 51 | 52 | @pytest.fixture(scope="session", autouse=True) 53 | async def initialize_tests(event_loop, request): 54 | await Tortoise.init(config=tortoise_orm, _create_db=True) 55 | await generate_schema_for_client(Tortoise.get_connection("default"), safe=True) 56 | 57 | client = Tortoise.get_connection("default") 58 | if client.schema_generator is MySQLSchemaGenerator: 59 | Migrate.ddl = MysqlDDL(client) 60 | elif client.schema_generator is SqliteSchemaGenerator: 61 | Migrate.ddl = SqliteDDL(client) 62 | elif client.schema_generator is AsyncpgSchemaGenerator: 63 | Migrate.ddl = PostgresDDL(client) 64 | Migrate.dialect = Migrate.ddl.DIALECT 65 | request.addfinalizer(lambda: event_loop.run_until_complete(Tortoise._drop_databases())) 66 | -------------------------------------------------------------------------------- 
/tests/models.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import uuid 3 | from enum import IntEnum 4 | 5 | from tortoise import Model, fields 6 | 7 | 8 | class ProductType(IntEnum): 9 | article = 1 10 | page = 2 11 | 12 | 13 | class PermissionAction(IntEnum): 14 | create = 1 15 | delete = 2 16 | update = 3 17 | read = 4 18 | 19 | 20 | class Status(IntEnum): 21 | on = 1 22 | off = 0 23 | 24 | 25 | class User(Model): 26 | username = fields.CharField(max_length=20, unique=True) 27 | password = fields.CharField(max_length=100) 28 | last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now) 29 | is_active = fields.BooleanField(default=True, description="Is Active") 30 | is_superuser = fields.BooleanField(default=False, description="Is SuperUser") 31 | intro = fields.TextField(default="") 32 | longitude = fields.DecimalField(max_digits=10, decimal_places=8) 33 | 34 | 35 | class Email(Model): 36 | email_id = fields.IntField(pk=True) 37 | email = fields.CharField(max_length=200, index=True) 38 | is_primary = fields.BooleanField(default=False) 39 | address = fields.CharField(max_length=200) 40 | users = fields.ManyToManyField("models.User") 41 | 42 | 43 | def default_name(): 44 | return uuid.uuid4() 45 | 46 | 47 | class Category(Model): 48 | slug = fields.CharField(max_length=100) 49 | name = fields.CharField(max_length=200, null=True, default=default_name) 50 | user = fields.ForeignKeyField("models.User", description="User") 51 | created_at = fields.DatetimeField(auto_now_add=True) 52 | 53 | 54 | class Product(Model): 55 | categories = fields.ManyToManyField("models.Category") 56 | name = fields.CharField(max_length=50) 57 | view_num = fields.IntField(description="View Num", default=0) 58 | sort = fields.IntField() 59 | is_reviewed = fields.BooleanField(description="Is Reviewed") 60 | type = fields.IntEnumField( 61 | ProductType, description="Product Type", source_field="type_db_alias" 62 | ) 63 | pic = fields.CharField(max_length=200) 64 | body = fields.TextField() 65 | created_at = fields.DatetimeField(auto_now_add=True) 66 | 67 | class Meta: 68 | unique_together = (("name", "type"),) 69 | indexes = (("name", "type"),) 70 | 71 | 72 | class Config(Model): 73 | label = fields.CharField(max_length=200) 74 | key = fields.CharField(max_length=20) 75 | value = fields.JSONField() 76 | status: Status = fields.IntEnumField(Status) 77 | user = fields.ForeignKeyField("models.User", description="User") 78 | 79 | 80 | class NewModel(Model): 81 | name = fields.CharField(max_length=50) 82 | -------------------------------------------------------------------------------- /aerich/inspectdb/mysql.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from aerich.inspectdb import Column, Inspect 4 | 5 | 6 | class InspectMySQL(Inspect): 7 | @property 8 | def field_map(self) -> dict: 9 | return { 10 | "int": self.int_field, 11 | "smallint": self.smallint_field, 12 | "tinyint": self.bool_field, 13 | "bigint": self.bigint_field, 14 | "varchar": self.char_field, 15 | "char": self.char_field, 16 | "longtext": self.text_field, 17 | "text": self.text_field, 18 | "datetime": self.datetime_field, 19 | "float": self.float_field, 20 | "date": self.date_field, 21 | "time": self.time_field, 22 | "decimal": self.decimal_field, 23 | "json": self.json_field, 24 | "longblob": self.binary_field, 25 | } 26 | 27 | async def get_all_tables(self) -> List[str]: 28 | sql = 
"select TABLE_NAME from information_schema.TABLES where TABLE_SCHEMA=%s" 29 | ret = await self.conn.execute_query_dict(sql, [self.database]) 30 | return list(map(lambda x: x["TABLE_NAME"], ret)) 31 | 32 | async def get_columns(self, table: str) -> List[Column]: 33 | columns = [] 34 | sql = """select c.*, s.NON_UNIQUE, s.INDEX_NAME 35 | from information_schema.COLUMNS c 36 | left join information_schema.STATISTICS s on c.TABLE_NAME = s.TABLE_NAME 37 | and c.TABLE_SCHEMA = s.TABLE_SCHEMA 38 | and c.COLUMN_NAME = s.COLUMN_NAME 39 | where c.TABLE_SCHEMA = %s 40 | and c.TABLE_NAME = %s""" 41 | ret = await self.conn.execute_query_dict(sql, [self.database, table]) 42 | for row in ret: 43 | non_unique = row["NON_UNIQUE"] 44 | if non_unique is None: 45 | unique = False 46 | else: 47 | unique = not non_unique 48 | index_name = row["INDEX_NAME"] 49 | if index_name is None: 50 | index = False 51 | else: 52 | index = row["INDEX_NAME"] != "PRIMARY" 53 | columns.append( 54 | Column( 55 | name=row["COLUMN_NAME"], 56 | data_type=row["DATA_TYPE"], 57 | null=row["IS_NULLABLE"] == "YES", 58 | default=row["COLUMN_DEFAULT"], 59 | pk=row["COLUMN_KEY"] == "PRI", 60 | comment=row["COLUMN_COMMENT"], 61 | unique=row["COLUMN_KEY"] == "UNI", 62 | extra=row["EXTRA"], 63 | unque=unique, 64 | index=index, 65 | length=row["CHARACTER_MAXIMUM_LENGTH"], 66 | max_digits=row["NUMERIC_PRECISION"], 67 | decimal_places=row["NUMERIC_SCALE"], 68 | ) 69 | ) 70 | return columns 71 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### Python template 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | wheels/ 25 | pip-wheel-metadata/ 26 | share/python-wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | MANIFEST 31 | 32 | # PyInstaller 33 | # Usually these files are written by a python script from a template 34 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 35 | *.manifest 36 | *.spec 37 | 38 | # Installer logs 39 | pip-log.txt 40 | pip-delete-this-directory.txt 41 | 42 | # Unit test / coverage reports 43 | htmlcov/ 44 | .tox/ 45 | .nox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | *.py,cover 53 | .hypothesis/ 54 | .pytest_cache/ 55 | cover/ 56 | 57 | # Translations 58 | *.mo 59 | *.pot 60 | 61 | # Django stuff: 62 | *.log 63 | local_settings.py 64 | db.sqlite3 65 | db.sqlite3-journal 66 | 67 | # Flask stuff: 68 | instance/ 69 | .webassets-cache 70 | 71 | # Scrapy stuff: 72 | .scrapy 73 | 74 | # Sphinx documentation 75 | docs/_build/ 76 | 77 | # PyBuilder 78 | .pybuilder/ 79 | target/ 80 | 81 | # Jupyter Notebook 82 | .ipynb_checkpoints 83 | 84 | # IPython 85 | profile_default/ 86 | ipython_config.py 87 | 88 | # pyenv 89 | # For a library or package, you might want to ignore these files since the code is 90 | # intended to run in multiple environments; otherwise, check them in: 91 | # .python-version 92 | 93 | # pipenv 94 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
95 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 96 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 97 | # install all needed dependencies. 98 | #Pipfile.lock 99 | 100 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 101 | __pypackages__/ 102 | 103 | # Celery stuff 104 | celerybeat-schedule 105 | celerybeat.pid 106 | 107 | # SageMath parsed files 108 | *.sage.py 109 | 110 | # Environments 111 | .env 112 | .venv 113 | env/ 114 | venv/ 115 | ENV/ 116 | env.bak/ 117 | venv.bak/ 118 | 119 | # Spyder project settings 120 | .spyderproject 121 | .spyproject 122 | 123 | # Rope project settings 124 | .ropeproject 125 | 126 | # mkdocs documentation 127 | /site 128 | 129 | # mypy 130 | .mypy_cache/ 131 | .dmypy.json 132 | dmypy.json 133 | 134 | # Pyre type checker 135 | .pyre/ 136 | 137 | # pytype static type analyzer 138 | .pytype/ 139 | 140 | # Cython debug symbols 141 | cython_debug/ 142 | 143 | .idea 144 | migrations 145 | aerich.ini 146 | src 147 | .vscode 148 | .DS_Store 149 | .python-version -------------------------------------------------------------------------------- /aerich/utils.py: -------------------------------------------------------------------------------- 1 | import importlib.util 2 | import os 3 | import re 4 | import sys 5 | from pathlib import Path 6 | from typing import Dict 7 | 8 | from click import BadOptionUsage, ClickException, Context 9 | from tortoise import BaseDBAsyncClient, Tortoise 10 | 11 | 12 | def add_src_path(path: str) -> str: 13 | """ 14 | add a folder to the paths, so we can import from there 15 | :param path: path to add 16 | :return: absolute path 17 | """ 18 | if not os.path.isabs(path): 19 | # use the absolute path, otherwise some other things (e.g. 
__file__) won't work properly
20 |         path = os.path.abspath(path)
21 |     if not os.path.isdir(path):
22 |         raise ClickException(f"Specified source folder does not exist: {path}")
23 |     if path not in sys.path:
24 |         sys.path.insert(0, path)
25 |     return path
26 |
27 |
28 | def get_app_connection_name(config, app_name: str) -> str:
29 |     """
30 |     get the connection name of an app
31 |     :param config:
32 |     :param app_name:
33 |     :return:
34 |     """
35 |     app = config.get("apps").get(app_name)
36 |     if app:
37 |         return app.get("default_connection", "default")
38 |     raise BadOptionUsage(
39 |         option_name="--app",
40 |         message=f'Can\'t get app named "{app_name}"',
41 |     )
42 |
43 |
44 | def get_app_connection(config, app) -> BaseDBAsyncClient:
45 |     """
46 |     get the connection of an app
47 |     :param config:
48 |     :param app:
49 |     :return:
50 |     """
51 |     return Tortoise.get_connection(get_app_connection_name(config, app))
52 |
53 |
54 | def get_tortoise_config(ctx: Context, tortoise_orm: str) -> dict:
55 |     """
56 |     get tortoise config from module
57 |     :param ctx:
58 |     :param tortoise_orm:
59 |     :return:
60 |     """
61 |     splits = tortoise_orm.split(".")
62 |     config_path = ".".join(splits[:-1])
63 |     tortoise_config = splits[-1]
64 |
65 |     try:
66 |         config_module = importlib.import_module(config_path)
67 |     except ModuleNotFoundError as e:
68 |         raise ClickException(f"Error while importing configuration module: {e}") from None
69 |
70 |     config = getattr(config_module, tortoise_config, None)
71 |     if not config:
72 |         raise BadOptionUsage(
73 |             option_name="--config",
74 |             message=f'Can\'t get "{tortoise_config}" from module "{config_module}"',
75 |             ctx=ctx,
76 |         )
77 |     return config
78 |
79 |
80 | def get_models_describe(app: str) -> Dict:
81 |     """
82 |     get app models describe
83 |     :param app:
84 |     :return:
85 |     """
86 |     ret = {}
87 |     for model in Tortoise.apps.get(app).values():
88 |         describe = model.describe()
89 |         ret[describe.get("name")] = describe
90 |     return ret
91 |
92 |
93 | def is_default_function(string: str):
94 |     return re.match(r"^<function.+>$", str(string or ""))  # e.g. "<function default_name at 0x...>"
95 |
96 |
97 | def import_py_file(file: Path):
98 |     module_name, file_ext = os.path.splitext(os.path.split(file)[-1])
99 |     spec = importlib.util.spec_from_file_location(module_name, file)
100 |     module = importlib.util.module_from_spec(spec)
101 |     spec.loader.exec_module(module)
102 |     return module
103 |
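A quick illustration of the two helpers the migration runner leans on most — `import_py_file` (how version files are loaded, mirroring `tests/test_utils.py`) and `is_default_function` (how callable defaults such as `default_name` in `tests/models.py` are recognized from their repr); the memory address in the example string is made up:

```python
from aerich.utils import import_py_file, is_default_function

# Load a module from an arbitrary file path, exactly as upgrade/downgrade
# do with migration version files.
m = import_py_file("aerich/utils.py")
assert hasattr(m, "import_py_file")

# Callable defaults appear in describe() output as "<function ... at 0x...>".
assert is_default_function("<function default_name at 0x7f3a2c1b0d30>")
assert not is_default_function("uuid4()")
```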
"timestamp": self.datetime_field, 34 | } 35 | 36 | async def get_all_tables(self) -> List[str]: 37 | sql = "select TABLE_NAME from information_schema.TABLES where table_catalog=$1 and table_schema=$2" 38 | ret = await self.conn.execute_query_dict(sql, [self.database, self.schema]) 39 | return list(map(lambda x: x["table_name"], ret)) 40 | 41 | async def get_columns(self, table: str) -> List[Column]: 42 | columns = [] 43 | sql = f"""select c.column_name, 44 | col_description('public.{table}'::regclass, ordinal_position) as column_comment, 45 | t.constraint_type as column_key, 46 | udt_name as data_type, 47 | is_nullable, 48 | column_default, 49 | character_maximum_length, 50 | numeric_precision, 51 | numeric_scale 52 | from information_schema.constraint_column_usage const 53 | join information_schema.table_constraints t 54 | using (table_catalog, table_schema, table_name, constraint_catalog, constraint_schema, constraint_name) 55 | right join information_schema.columns c using (column_name, table_catalog, table_schema, table_name) 56 | where c.table_catalog = $1 57 | and c.table_name = $2 58 | and c.table_schema = $3""" 59 | ret = await self.conn.execute_query_dict(sql, [self.database, table, self.schema]) 60 | for row in ret: 61 | columns.append( 62 | Column( 63 | name=row["column_name"], 64 | data_type=row["data_type"], 65 | null=row["is_nullable"] == "YES", 66 | default=row["column_default"], 67 | length=row["character_maximum_length"], 68 | max_digits=row["numeric_precision"], 69 | decimal_places=row["numeric_scale"], 70 | comment=row["column_comment"], 71 | pk=row["column_key"] == "PRIMARY KEY", 72 | unique=False, # can't get this simply 73 | index=False, # can't get this simply 74 | ) 75 | ) 76 | return columns 77 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # ChangeLog 2 | 3 | ## 0.7 4 | 5 | ### 0.7.2 6 | 7 | - Support virtual fields. 8 | - Fix modify multiple times. (#279) 9 | - Added `-i` and `--in-transaction` options to `aerich migrate` command. (#296) 10 | - Fix generates two semicolons in a row. (#301) 11 | 12 | ### 0.7.1 13 | 14 | - Fix syntax error with python3.8.10. (#265) 15 | - Fix sql generate error. (#263) 16 | - Fix initialize an empty database. (#267) 17 | 18 | ### 0.7.1rc1 19 | 20 | - Fix postgres sql error (#263) 21 | 22 | ### 0.7.0 23 | 24 | **Now aerich use `.py` file to record versions.** 25 | 26 | Upgrade Note: 27 | 28 | 1. Drop `aerich` table 29 | 2. Delete `migrations/models` folder 30 | 3. Run `aerich init-db` 31 | 32 | - Improve `inspectdb` adding support to `postgresql::numeric` data type 33 | - Add support for dynamically load DDL classes easing to add support to 34 | new databases without changing `Migrate` class logic 35 | - Fix decimal field change. (#246) 36 | - Support add/remove field with index. 37 | 38 | ## 0.6 39 | 40 | ### 0.6.3 41 | 42 | - Improve `inspectdb` and support `postgres` & `sqlite`. 43 | 44 | ### 0.6.2 45 | 46 | - Support migration for specified index. (#203) 47 | 48 | ### 0.6.1 49 | 50 | - Fix `pyproject.toml` not existing error. (#217) 51 | 52 | ### 0.6.0 53 | 54 | - Change default config file from `aerich.ini` to `pyproject.toml`. (#197) 55 | 56 | **Upgrade note:** 57 | 1. Run `aerich init -t config.TORTOISE_ORM`. 58 | 2. Remove `aerich.ini`. 59 | - Remove `pydantic` dependency. (#198) 60 | - `inspectdb` support `DATE`. 
61 |
62 | ## 0.5
63 |
64 | ### 0.5.8
65 |
66 | - Support `indexes` change. (#193)
67 |
68 | ### 0.5.7
69 |
70 | - Fix no module found error. (#188) (#189)
71 |
72 | ### 0.5.6
73 |
74 | - Add `Command` class. (#148) (#141) (#123) (#106)
75 | - Fix: migrate doesn't use source_field in unique_together. (#181)
76 |
77 | ### 0.5.5
78 |
79 | - Fix KeyError: 'src_folder' after upgrading aerich to 0.5.4. (#176)
80 | - Fix MySQL 5.X rename column.
81 | - Fix `db_constraint` when fk changed. (#179)
82 |
83 | ### 0.5.4
84 |
85 | - Fix incorrect index creation order. (#151)
86 | - Don't catch the exception when importing config. (#164)
87 | - Support `drop column` for sqlite. (#40)
88 |
89 | ### 0.5.3
90 |
91 | - Fix postgres alter null. (#142)
92 | - Fix default function when migrating. (#147)
93 |
94 | ### 0.5.2
95 |
96 | - Fix rename field on the field add. (#134)
97 | - Fix postgres field type change error. (#135)
98 | - Fix inspectdb for `FloatField`. (#138)
99 | - Support `rename table`. (#139)
100 |
101 | ### 0.5.1
102 |
103 | - Fix tortoise connections not being closed properly. (#120)
104 | - Fix bug for field change. (#119)
105 | - Fix drop model in the downgrade. (#132)
106 |
107 | ### 0.5.0
108 |
109 | - Refactor core code; the previous limitations no longer apply.
110 |
111 | ## 0.4
112 |
113 | ### 0.4.4
114 |
115 | - Fix unnecessary import. (#113)
116 |
117 | ### 0.4.3
118 |
119 | - Replace migrations separator with a standard SQL comment.
120 | - Add `inspectdb` command.
121 |
122 | ### 0.4.2
123 |
124 | - Use `pathlib` for path resolving. (#89)
125 | - Fix upgrade in new db. (#96)
126 | - Fix packaging error. (#92)
127 |
128 | ### 0.4.1
129 |
130 | - Bug fix. (#91 #93)
131 |
132 | ### 0.4.0
133 |
134 | - Use `.sql` instead of `.json` to store version files.
135 | - Add `rename` column support for MySQL 5.
136 | - Remove callable detection for defaults. (#87)
137 | - Fix `sqlite` stuck. (#90)
138 |
139 | ## 0.3
140 |
141 | ### 0.3.3
142 |
143 | - Fix encoding error. (#75)
144 | - Support multiple databases. (#68)
145 | - Compatible with models file in directory. (#70)
146 |
147 | ### 0.3.2
148 |
149 | - Fix migrate to new database error. (#62)
150 |
151 | ### 0.3.1
152 |
153 | - Fix first version error.
154 | - Fix init error. (#61)
155 |
156 | ### 0.3.0
157 |
158 | - Refactored migrate logic; this version is not compatible with previous versions.
159 | - `old_models.py` is no longer needed; model state is stored in the database.
160 | - Upgrade steps:
161 |   1. Upgrade aerich version.
162 |   2. Drop aerich table in database.
163 |   3. Delete `migrations/{app}` folder and rerun `aerich init-db`.
164 |   4. Update model and `aerich migrate` normally.
165 |
166 | ## 0.2
167 |
168 | ### 0.2.5
169 |
170 | - Fix windows support. (#46)
171 | - Support `db_constraint` in fk; m2m through tables with fk should be defined manually. (#52)
172 |
173 | ### 0.2.4
174 |
175 | - Raise error on unsupported SQLite features.
176 | - Fix Postgres alter table. (#48)
177 | - Add `Rename` support.
178 |
179 | ### 0.2.3
180 |
181 | - Fix tortoise ssl config.
182 | - PostgreSQL add/drop index/unique.
183 |
184 | ### 0.2.2
185 |
186 | - Fix postgres drop fk.
187 | - Fix version sort.
188 |
189 | ### 0.2.1
190 |
191 | - Fix bug in windows.
192 | - Enhance PostgreSQL support.
193 |
194 | ### 0.2.0
195 |
196 | - Update model file find method.
197 | - Set `--safe` bool.
198 |
199 | ## 0.1
200 |
201 | ### 0.1.9
202 |
203 | - Fix default_connection when upgrading.
204 | - Find default app instead of default.
205 | - Diff MySQL ddl.
206 | - Check tortoise config.
207 |
208 | ### 0.1.8
209 |
210 | - Fix upgrade error when migrating.
211 | - Fix init db sql error.
212 | - Support change column.
213 |
214 | ### 0.1.7
215 |
216 | - Exclude models.Aerich.
217 | - Add init record when init-db.
218 | - Fix version num str.
219 |
220 | ### 0.1.6
221 |
222 | - Update dependency_links.
223 |
224 | ### 0.1.5
225 |
226 | - Add sqlite and postgres support.
227 | - Fix dependency import.
228 | - Store versions in db.
229 |
230 | ### 0.1.4
231 |
232 | - Fix transaction and fields import.
233 | - Make unique index work.
234 | - Add cli --version.
235 |
236 | ### 0.1.3
237 |
238 | - Support indexes and unique_together.
239 |
240 | ### 0.1.2
241 |
242 | - Now aerich supports m2m.
243 | - Add cli cmd init-db.
244 | - Change cli options.
245 |
246 | ### 0.1.1
247 |
248 | - Aerich now basically works.
249 |
--------------------------------------------------------------------------------
/aerich/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 | from pathlib import Path
3 | from typing import List
4 |
5 | from tortoise import Tortoise, generate_schema_for_client
6 | from tortoise.exceptions import OperationalError
7 | from tortoise.transactions import in_transaction
8 | from tortoise.utils import get_schema_sql
9 |
10 | from aerich.exceptions import DowngradeError
11 | from aerich.inspectdb.mysql import InspectMySQL
12 | from aerich.inspectdb.postgres import InspectPostgres
13 | from aerich.inspectdb.sqlite import InspectSQLite
14 | from aerich.migrate import MIGRATE_TEMPLATE, Migrate
15 | from aerich.models import Aerich
16 | from aerich.utils import (
17 |     get_app_connection,
18 |     get_app_connection_name,
19 |     get_models_describe,
20 |     import_py_file,
21 | )
22 |
23 |
24 | class Command:
25 |     def __init__(
26 |         self,
27 |         tortoise_config: dict,
28 |         app: str = "models",
29 |         location: str = "./migrations",
30 |     ):
31 |         self.tortoise_config = tortoise_config
32 |         self.app = app
33 |         self.location = location
34 |         Migrate.app = app
35 |
36 |     async def init(self):
37 |         await Migrate.init(self.tortoise_config, self.app, self.location)
38 |
39 |     async def _upgrade(self, conn, version_file):
40 |         file_path = Path(Migrate.migrate_location, version_file)
41 |         m = import_py_file(file_path)
42 |         upgrade = getattr(m, "upgrade")
43 |         await conn.execute_script(await upgrade(conn))
44 |         await Aerich.create(
45 |             version=version_file,
46 |             app=self.app,
47 |             content=get_models_describe(self.app),
48 |         )
49 |
50 |     async def upgrade(self, run_in_transaction: bool):
51 |         migrated = []
52 |         for version_file in Migrate.get_all_version_files():
53 |             try:
54 |                 exists = await Aerich.exists(version=version_file, app=self.app)
55 |             except OperationalError:
56 |                 exists = False
57 |             if not exists:
58 |                 app_conn_name = get_app_connection_name(self.tortoise_config, self.app)
59 |                 if run_in_transaction:
60 |                     async with in_transaction(app_conn_name) as conn:
61 |                         await self._upgrade(conn, version_file)
62 |                 else:
63 |                     app_conn = get_app_connection(self.tortoise_config, self.app)
64 |                     await self._upgrade(app_conn, version_file)
65 |                 migrated.append(version_file)
66 |         return migrated
67 |
68 |     async def downgrade(self, version: int, delete: bool):
69 |         ret = []
70 |         if version == -1:
71 |             specified_version = await Migrate.get_last_version()
72 |         else:
73 |             specified_version = await Aerich.filter(
74 |                 app=self.app, version__startswith=f"{version}_"
75 |             ).first()
76 |         if not specified_version:
77 |             raise DowngradeError("No specified version
found") 78 | if version == -1: 79 | versions = [specified_version] 80 | else: 81 | versions = await Aerich.filter(app=self.app, pk__gte=specified_version.pk) 82 | for version in versions: 83 | file = version.version 84 | async with in_transaction( 85 | get_app_connection_name(self.tortoise_config, self.app) 86 | ) as conn: 87 | file_path = Path(Migrate.migrate_location, file) 88 | m = import_py_file(file_path) 89 | downgrade = getattr(m, "downgrade") 90 | downgrade_sql = await downgrade(conn) 91 | if not downgrade_sql.strip(): 92 | raise DowngradeError("No downgrade items found") 93 | await conn.execute_script(downgrade_sql) 94 | await version.delete() 95 | if delete: 96 | os.unlink(file_path) 97 | ret.append(file) 98 | return ret 99 | 100 | async def heads(self): 101 | ret = [] 102 | versions = Migrate.get_all_version_files() 103 | for version in versions: 104 | if not await Aerich.exists(version=version, app=self.app): 105 | ret.append(version) 106 | return ret 107 | 108 | async def history(self): 109 | versions = Migrate.get_all_version_files() 110 | return [version for version in versions] 111 | 112 | async def inspectdb(self, tables: List[str] = None) -> str: 113 | connection = get_app_connection(self.tortoise_config, self.app) 114 | dialect = connection.schema_generator.DIALECT 115 | if dialect == "mysql": 116 | cls = InspectMySQL 117 | elif dialect == "postgres": 118 | cls = InspectPostgres 119 | elif dialect == "sqlite": 120 | cls = InspectSQLite 121 | else: 122 | raise NotImplementedError(f"{dialect} is not supported") 123 | inspect = cls(connection, tables) 124 | return await inspect.inspect() 125 | 126 | async def migrate(self, name: str = "update"): 127 | return await Migrate.migrate(name) 128 | 129 | async def init_db(self, safe: bool): 130 | location = self.location 131 | app = self.app 132 | dirname = Path(location, app) 133 | dirname.mkdir(parents=True) 134 | 135 | await Tortoise.init(config=self.tortoise_config) 136 | connection = get_app_connection(self.tortoise_config, app) 137 | await generate_schema_for_client(connection, safe) 138 | 139 | schema = get_schema_sql(connection, safe) 140 | 141 | version = await Migrate.generate_version() 142 | await Aerich.create( 143 | version=version, 144 | app=app, 145 | content=get_models_describe(app), 146 | ) 147 | version_file = Path(dirname, version) 148 | content = MIGRATE_TEMPLATE.format(upgrade_sql=schema, downgrade_sql="") 149 | with open(version_file, "w", encoding="utf-8") as f: 150 | f.write(content) 151 | -------------------------------------------------------------------------------- /aerich/inspectdb/__init__.py: -------------------------------------------------------------------------------- 1 | from typing import Any, List, Optional 2 | 3 | from pydantic import BaseModel 4 | from tortoise import BaseDBAsyncClient 5 | 6 | 7 | class Column(BaseModel): 8 | name: str 9 | data_type: str 10 | null: bool 11 | default: Any 12 | comment: Optional[str] 13 | pk: bool 14 | unique: bool 15 | index: bool 16 | length: Optional[int] 17 | extra: Optional[str] 18 | decimal_places: Optional[int] 19 | max_digits: Optional[int] 20 | 21 | def translate(self) -> dict: 22 | comment = default = length = index = null = pk = "" 23 | if self.pk: 24 | pk = "pk=True, " 25 | else: 26 | if self.unique: 27 | index = "unique=True, " 28 | else: 29 | if self.index: 30 | index = "index=True, " 31 | if self.data_type in ["varchar", "VARCHAR"]: 32 | length = f"max_length={self.length}, " 33 | if self.data_type in ["decimal", "numeric"]: 34 | length_parts = 
[]
35 |             if self.max_digits:
36 |                 length_parts.append(f"max_digits={self.max_digits}")
37 |             if self.decimal_places:
38 |                 length_parts.append(f"decimal_places={self.decimal_places}")
39 |             length = (", ".join(length_parts) + ", ") if length_parts else ""  # trailing ", " composes with the other fragments
40 |         if self.null:
41 |             null = "null=True, "
42 |         if self.default is not None:
43 |             if self.data_type in ["tinyint", "INT"]:
44 |                 default = f"default={'True' if self.default == '1' else 'False'}, "
45 |             elif self.data_type == "bool":
46 |                 default = f"default={'True' if self.default == 'true' else 'False'}, "
47 |             elif self.data_type in ["datetime", "timestamptz", "TIMESTAMP"]:
48 |                 if "CURRENT_TIMESTAMP" == self.default:
49 |                     if "DEFAULT_GENERATED on update CURRENT_TIMESTAMP" == self.extra:
50 |                         default = "auto_now=True, "
51 |                     else:
52 |                         default = "auto_now_add=True, "
53 |             else:
54 |                 if "::" in self.default:
55 |                     default = f"default={self.default.split('::')[0]}, "
56 |                 elif self.default.endswith("()"):
57 |                     default = ""
58 |                 else:
59 |                     default = f"default={self.default}, "
60 |
61 |         if self.comment:
62 |             comment = f"description='{self.comment}', "
63 |         return {
64 |             "name": self.name,
65 |             "pk": pk,
66 |             "index": index,
67 |             "null": null,
68 |             "default": default,
69 |             "length": length,
70 |             "comment": comment,
71 |         }
72 |
73 |
74 | class Inspect:
75 |     _table_template = "class {table}(Model):\n"
76 |
77 |     def __init__(self, conn: BaseDBAsyncClient, tables: Optional[List[str]] = None):
78 |         self.conn = conn
79 |         try:
80 |             self.database = conn.database
81 |         except AttributeError:
82 |             pass
83 |         self.tables = tables
84 |
85 |     @property
86 |     def field_map(self) -> dict:
87 |         raise NotImplementedError
88 |
89 |     async def inspect(self) -> str:
90 |         if not self.tables:
91 |             self.tables = await self.get_all_tables()
92 |         result = "from tortoise import Model, fields\n\n\n"
93 |         tables = []
94 |         for table in self.tables:
95 |             columns = await self.get_columns(table)
96 |             fields = []
97 |             model = self._table_template.format(table=table.title().replace("_", ""))
98 |             for column in columns:
99 |                 field = self.field_map[column.data_type](**column.translate())
100 |                 fields.append("    " + field)
101 |             tables.append(model + "\n".join(fields))
102 |         return result + "\n\n\n".join(tables)
103 |
104 |     async def get_columns(self, table: str) -> List[Column]:
105 |         raise NotImplementedError
106 |
107 |     async def get_all_tables(self) -> List[str]:
108 |         raise NotImplementedError
109 |
110 |     @classmethod
111 |     def decimal_field(cls, **kwargs) -> str:
112 |         return "{name} = fields.DecimalField({pk}{index}{length}{null}{default}{comment})".format(
113 |             **kwargs
114 |         )
115 |
116 |     @classmethod
117 |     def time_field(cls, **kwargs) -> str:
118 |         return "{name} = fields.TimeField({null}{default}{comment})".format(**kwargs)
119 |
120 |     @classmethod
121 |     def date_field(cls, **kwargs) -> str:
122 |         return "{name} = fields.DateField({null}{default}{comment})".format(**kwargs)
123 |
124 |     @classmethod
125 |     def float_field(cls, **kwargs) -> str:
126 |         return "{name} = fields.FloatField({null}{default}{comment})".format(**kwargs)
127 |
128 |     @classmethod
129 |     def datetime_field(cls, **kwargs) -> str:
130 |         return "{name} = fields.DatetimeField({null}{default}{comment})".format(**kwargs)
131 |
132 |     @classmethod
133 |     def text_field(cls, **kwargs) -> str:
134 |         return "{name} = fields.TextField({null}{default}{comment})".format(**kwargs)
135 |
136 |     @classmethod
137 |     def char_field(cls, **kwargs) -> str:
138 |         return "{name} = fields.CharField({pk}{index}{length}{null}{default}{comment})".format(
139 |             **kwargs
140 |         )
141 |
142 |     @classmethod
143 |     def int_field(cls, **kwargs) -> str:
144 |         return "{name} = fields.IntField({pk}{index}{comment})".format(**kwargs)
145 |
146 |     @classmethod
147 |     def smallint_field(cls, **kwargs) -> str:
148 |         return "{name} = fields.SmallIntField({pk}{index}{comment})".format(**kwargs)
149 |
150 |     @classmethod
151 |     def bigint_field(cls, **kwargs) -> str:
152 |         return "{name} = fields.BigIntField({pk}{index}{default}{comment})".format(**kwargs)
153 |
154 |     @classmethod
155 |     def bool_field(cls, **kwargs) -> str:
156 |         return "{name} = fields.BooleanField({null}{default}{comment})".format(**kwargs)
157 |
158 |     @classmethod
159 |     def uuid_field(cls, **kwargs) -> str:
160 |         return "{name} = fields.UUIDField({pk}{index}{default}{comment})".format(**kwargs)
161 |
162 |     @classmethod
163 |     def json_field(cls, **kwargs) -> str:
164 |         return "{name} = fields.JSONField({null}{default}{comment})".format(**kwargs)
165 |
166 |     @classmethod
167 |     def binary_field(cls, **kwargs) -> str:
168 |         return "{name} = fields.BinaryField({null}{default}{comment})".format(**kwargs)
169 |
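To make the template plumbing above concrete: a `Column` from any backend is first normalized by `translate()` into string fragments, which a `*_field` classmethod then splices into a model line. A small self-contained example, with values invented for illustration:

```python
from aerich.inspectdb import Column, Inspect

# A numeric column as a MySQL/Postgres inspector might report it.
col = Column(
    name="price", data_type="numeric", null=False, default=None,
    comment=None, pk=False, unique=False, index=False,
    length=None, extra=None, max_digits=10, decimal_places=2,
)
# decimal_field is a classmethod, so the base class can render it directly.
print(Inspect.decimal_field(**col.translate()))
# price = fields.DecimalField(max_digits=10, decimal_places=2, )
```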
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Aerich
2 |
3 | [![image](https://img.shields.io/pypi/v/aerich.svg?style=flat)](https://pypi.python.org/pypi/aerich)
4 | [![image](https://img.shields.io/github/license/tortoise/aerich)](https://github.com/tortoise/aerich)
5 | [![image](https://github.com/tortoise/aerich/workflows/pypi/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:pypi)
6 | [![image](https://github.com/tortoise/aerich/workflows/ci/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:ci)
7 |
8 | English | [Русский](./README_RU.md)
9 |
10 | ## Introduction
11 |
12 | Aerich is a database migrations tool for Tortoise-ORM, much like alembic for SQLAlchemy, or the migration solution built
13 | into the Django ORM.
14 |
15 | ## Install
16 |
17 | Just install from PyPI:
18 |
19 | ```shell
20 | pip install aerich
21 | ```
22 |
23 | ## Quick Start
24 |
25 | ```shell
26 | > aerich -h
27 |
28 | Usage: aerich [OPTIONS] COMMAND [ARGS]...
29 |
30 | Options:
31 |   -V, --version      Show the version and exit.
32 |   -c, --config TEXT  Config file.  [default: pyproject.toml]
33 |   --app TEXT         Tortoise-ORM app name.
34 |   -h, --help         Show this message and exit.
35 |
36 | Commands:
37 |   downgrade  Downgrade to specified version.
38 |   heads      Show current available heads in migrate location.
39 |   history    List all migrate items.
40 |   init       Init config file and generate root migrate location.
41 |   init-db    Generate schema and generate app migrate location.
42 |   inspectdb  Introspects the database tables to standard output as...
43 |   migrate    Generate migrate changes file.
44 |   upgrade    Upgrade to specified version.
45 | ```
46 |
47 | ## Usage
48 |
49 | You first need to add `aerich.models` to your `Tortoise-ORM` config. Example:
50 |
51 | ```python
52 | TORTOISE_ORM = {
53 |     "connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"},
54 |     "apps": {
55 |         "models": {
56 |             "models": ["tests.models", "aerich.models"],
57 |             "default_connection": "default",
58 |         },
59 |     },
60 | }
61 | ```
62 |
63 | ### Initialization
64 |
65 | ```shell
66 | > aerich init -h
67 |
68 | Usage: aerich init [OPTIONS]
69 |
70 |   Init config file and generate root migrate location.
79 | 
80 | Initialize the config file and migrations location:
81 | 
82 | ```shell
83 | > aerich init -t tests.backends.mysql.TORTOISE_ORM
84 | 
85 | Success create migrate location ./migrations
86 | Success write config to pyproject.toml
87 | ```
88 | 
89 | ### Init db
90 | 
91 | ```shell
92 | > aerich init-db
93 | 
94 | Success create app migrate location ./migrations/models
95 | Success generate schema for app "models"
96 | ```
97 | 
98 | If your Tortoise-ORM app is not the default `models`, you must specify the correct app via `--app`,
99 | e.g. `aerich --app other_models init-db`.
100 | 
101 | ### Update models and make migrate
102 | 
103 | ```shell
104 | > aerich migrate --name drop_column
105 | 
106 | Success migrate 1_202029051520102929_drop_column.py
107 | ```
108 | 
109 | The format of the migration filename is
110 | `{version_num}_{datetime}_{name|update}.py`.
111 | 
112 | If `aerich` guesses you are renaming a column, it will ask `Rename {old_column} to {new_column} [True]`. You can choose
113 | `True` to rename the column without dropping it, or choose `False` to drop the old column and then create a new one. Note that the latter may
114 | lose data.
115 | 
116 | ### Upgrade to latest version
117 | 
118 | ```shell
119 | > aerich upgrade
120 | 
121 | Success upgrade 1_202029051520102929_drop_column.py
122 | ```
123 | 
124 | Now your database is migrated to the latest version.
125 | 
126 | ### Downgrade to specified version
127 | 
128 | ```shell
129 | > aerich downgrade -h
130 | 
131 | Usage: aerich downgrade [OPTIONS]
132 | 
133 |   Downgrade to specified version.
134 | 
135 | Options:
136 |   -v, --version INTEGER  Specified version, default to last.  [default: -1]
137 |   -d, --delete           Delete version files at the same time.  [default:
138 |                          False]
139 | 
140 |   --yes                  Confirm the action without prompting.
141 |   -h, --help             Show this message and exit.
142 | ```
143 | 
144 | ```shell
145 | > aerich downgrade
146 | 
147 | Success downgrade 1_202029051520102929_drop_column.py
148 | ```
149 | 
150 | Now your database is rolled back to the specified version.
151 | 
152 | ### Show history
153 | 
154 | ```shell
155 | > aerich history
156 | 
157 | 1_202029051520102929_drop_column.py
158 | ```
159 | 
160 | ### Show heads to be migrated
161 | 
162 | ```shell
163 | > aerich heads
164 | 
165 | 1_202029051520102929_drop_column.py
166 | ```
167 | 
168 | ### Inspect db tables to TortoiseORM model
169 | 
170 | Currently `inspectdb` supports MySQL, Postgres, and SQLite.
171 | 
172 | ```shell
173 | Usage: aerich inspectdb [OPTIONS]
174 | 
175 |   Introspects the database tables to standard output as TortoiseORM model.
176 | 
177 | Options:
178 |   -t, --table TEXT  Which tables to inspect.
179 |   -h, --help        Show this message and exit.
180 | ```
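Note that `-t` may be given more than once (the option is declared with `multiple=True` in
`aerich/cli.py`), so several tables can be inspected in a single run:

```shell
aerich inspectdb -t user -t category
```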
181 | 
182 | Inspect all tables and print to console:
183 | 
184 | ```shell
185 | aerich --app models inspectdb
186 | ```
187 | 
188 | Inspect a specified table in the default app and redirect to `models.py`:
189 | 
190 | ```shell
191 | aerich inspectdb -t user > models.py
192 | ```
193 | 
194 | For example, suppose your table is:
195 | 
196 | ```sql
197 | CREATE TABLE `test`
198 | (
199 |     `id`       int NOT NULL AUTO_INCREMENT,
200 |     `decimal`  decimal(10, 2) NOT NULL,
201 |     `date`     date DEFAULT NULL,
202 |     `datetime` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
203 |     `time`     time DEFAULT NULL,
204 |     `float`    float DEFAULT NULL,
205 |     `string`   varchar(200) COLLATE utf8mb4_general_ci DEFAULT NULL,
206 |     `tinyint`  tinyint DEFAULT NULL,
207 |     PRIMARY KEY (`id`),
208 |     KEY `asyncmy_string_index` (`string`)
209 | ) ENGINE = InnoDB
210 |   DEFAULT CHARSET = utf8mb4
211 |   COLLATE = utf8mb4_general_ci
212 | ```
213 | 
214 | Now run `aerich inspectdb -t test` to see the generated model:
215 | 
216 | ```python
217 | from tortoise import Model, fields
218 | 
219 | 
220 | class Test(Model):
221 |     date = fields.DateField(null=True, )
222 |     datetime = fields.DatetimeField(auto_now=True, )
223 |     decimal = fields.DecimalField(max_digits=10, decimal_places=2, )
224 |     float = fields.FloatField(null=True, )
225 |     id = fields.IntField(pk=True, )
226 |     string = fields.CharField(max_length=200, null=True, )
227 |     time = fields.TimeField(null=True, )
228 |     tinyint = fields.BooleanField(null=True, )
229 | ```
230 | 
231 | Note that this command is limited and can't infer some fields, such as `IntEnumField`, `ForeignKeyField`, and others.
232 | 
233 | ### Multiple databases
234 | 
235 | ```python
236 | tortoise_orm = {
237 |     "connections": {
238 |         "default": expand_db_url(db_url, True),
239 |         "second": expand_db_url(db_url_second, True),
240 |     },
241 |     "apps": {
242 |         "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"},
243 |         "models_second": {"models": ["tests.models_second"], "default_connection": "second", },
244 |     },
245 | }
246 | ```
247 | 
248 | You only need to specify `aerich.models` in one app, and you must pass `--app` when running `aerich migrate` and the other commands.
249 | 
250 | ## Restore `aerich` workflow
251 | 
252 | In some cases, such as breaking changes introduced by an upgrade of `aerich`, you can't run `aerich migrate` or `aerich upgrade`. In that
253 | situation, take the following steps:
254 | 
255 | 1. Drop the `aerich` table.
256 | 2. Delete the `migrations/{app}` directory.
257 | 3. Rerun `aerich init-db`.
258 | 
259 | Note that these actions are safe; you can also use them to reset your migrations if you have accumulated too many migration files.
260 | 
261 | ## Use `aerich` in application
262 | 
263 | You can use `aerich` outside of the CLI by using the `Command` class.
264 | 
265 | ```python
266 | from aerich import Command
267 | 
268 | command = Command(tortoise_config=config, app='models')
269 | await command.init()
270 | await command.migrate('test')
271 | ```
272 | 
273 | ## License
274 | 
275 | This project is licensed under the
276 | [Apache-2.0](https://github.com/long2ice/aerich/blob/master/LICENSE) License.
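As an appendix to the `Command` example above, a slightly fuller programmatic sketch (it assumes
the same `config` dict the CLI would load, and the `run_in_transaction` keyword is the one used by
the CLI wrapper in `aerich/cli.py`):

```python
from aerich import Command


async def apply_migrations(config: dict) -> None:
    command = Command(tortoise_config=config, app="models")
    await command.init()
    # Writes a new migration file only if the models changed since the last version ...
    await command.migrate("update")
    # ... then applies every pending version, each one inside a transaction.
    await command.upgrade(run_in_transaction=True)
```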
277 | -------------------------------------------------------------------------------- /tests/test_ddl.py: -------------------------------------------------------------------------------- 1 | from aerich.ddl.mysql import MysqlDDL 2 | from aerich.ddl.postgres import PostgresDDL 3 | from aerich.ddl.sqlite import SqliteDDL 4 | from aerich.migrate import Migrate 5 | from tests.models import Category, Product, User 6 | 7 | 8 | def test_create_table(): 9 | ret = Migrate.ddl.create_table(Category) 10 | if isinstance(Migrate.ddl, MysqlDDL): 11 | assert ( 12 | ret 13 | == """CREATE TABLE IF NOT EXISTS `category` ( 14 | `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT, 15 | `slug` VARCHAR(100) NOT NULL, 16 | `name` VARCHAR(200), 17 | `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), 18 | `user_id` INT NOT NULL COMMENT 'User', 19 | CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE 20 | ) CHARACTER SET utf8mb4""" 21 | ) 22 | 23 | elif isinstance(Migrate.ddl, SqliteDDL): 24 | assert ( 25 | ret 26 | == """CREATE TABLE IF NOT EXISTS "category" ( 27 | "id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, 28 | "slug" VARCHAR(100) NOT NULL, 29 | "name" VARCHAR(200), 30 | "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, 31 | "user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE /* User */ 32 | )""" 33 | ) 34 | 35 | elif isinstance(Migrate.ddl, PostgresDDL): 36 | assert ( 37 | ret 38 | == """CREATE TABLE IF NOT EXISTS "category" ( 39 | "id" SERIAL NOT NULL PRIMARY KEY, 40 | "slug" VARCHAR(100) NOT NULL, 41 | "name" VARCHAR(200), 42 | "created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP, 43 | "user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE 44 | ); 45 | COMMENT ON COLUMN "category"."user_id" IS 'User'""" 46 | ) 47 | 48 | 49 | def test_drop_table(): 50 | ret = Migrate.ddl.drop_table(Category._meta.db_table) 51 | if isinstance(Migrate.ddl, MysqlDDL): 52 | assert ret == "DROP TABLE IF EXISTS `category`" 53 | else: 54 | assert ret == 'DROP TABLE IF EXISTS "category"' 55 | 56 | 57 | def test_add_column(): 58 | ret = Migrate.ddl.add_column(Category, Category._meta.fields_map.get("name").describe(False)) 59 | if isinstance(Migrate.ddl, MysqlDDL): 60 | assert ret == "ALTER TABLE `category` ADD `name` VARCHAR(200)" 61 | else: 62 | assert ret == 'ALTER TABLE "category" ADD "name" VARCHAR(200)' 63 | 64 | 65 | def test_modify_column(): 66 | if isinstance(Migrate.ddl, SqliteDDL): 67 | return 68 | 69 | ret0 = Migrate.ddl.modify_column( 70 | Category, Category._meta.fields_map.get("name").describe(False) 71 | ) 72 | ret1 = Migrate.ddl.modify_column(User, User._meta.fields_map.get("is_active").describe(False)) 73 | if isinstance(Migrate.ddl, MysqlDDL): 74 | assert ret0 == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)" 75 | assert ( 76 | ret1 77 | == "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1" 78 | ) 79 | elif isinstance(Migrate.ddl, PostgresDDL): 80 | assert ( 81 | ret0 82 | == 'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200) USING "name"::VARCHAR(200)' 83 | ) 84 | 85 | assert ( 86 | ret1 == 'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL' 87 | ) 88 | 89 | 90 | def test_alter_column_default(): 91 | if isinstance(Migrate.ddl, SqliteDDL): 92 | return 93 | ret = Migrate.ddl.alter_column_default(User, User._meta.fields_map.get("intro").describe(False)) 94 | if isinstance(Migrate.ddl, PostgresDDL): 95 | assert ret == 'ALTER 
TABLE "user" ALTER COLUMN "intro" SET DEFAULT \'\'' 96 | elif isinstance(Migrate.ddl, MysqlDDL): 97 | assert ret == "ALTER TABLE `user` ALTER COLUMN `intro` SET DEFAULT ''" 98 | 99 | ret = Migrate.ddl.alter_column_default( 100 | Category, Category._meta.fields_map.get("created_at").describe(False) 101 | ) 102 | if isinstance(Migrate.ddl, PostgresDDL): 103 | assert ( 104 | ret == 'ALTER TABLE "category" ALTER COLUMN "created_at" SET DEFAULT CURRENT_TIMESTAMP' 105 | ) 106 | elif isinstance(Migrate.ddl, MysqlDDL): 107 | assert ( 108 | ret 109 | == "ALTER TABLE `category` ALTER COLUMN `created_at` SET DEFAULT CURRENT_TIMESTAMP(6)" 110 | ) 111 | 112 | ret = Migrate.ddl.alter_column_default( 113 | Product, Product._meta.fields_map.get("view_num").describe(False) 114 | ) 115 | if isinstance(Migrate.ddl, PostgresDDL): 116 | assert ret == 'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0' 117 | elif isinstance(Migrate.ddl, MysqlDDL): 118 | assert ret == "ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0" 119 | 120 | 121 | def test_alter_column_null(): 122 | if isinstance(Migrate.ddl, (SqliteDDL, MysqlDDL)): 123 | return 124 | ret = Migrate.ddl.alter_column_null( 125 | Category, Category._meta.fields_map.get("name").describe(False) 126 | ) 127 | if isinstance(Migrate.ddl, PostgresDDL): 128 | assert ret == 'ALTER TABLE "category" ALTER COLUMN "name" DROP NOT NULL' 129 | 130 | 131 | def test_set_comment(): 132 | if isinstance(Migrate.ddl, (SqliteDDL, MysqlDDL)): 133 | return 134 | ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("name").describe(False)) 135 | assert ret == 'COMMENT ON COLUMN "category"."name" IS NULL' 136 | 137 | ret = Migrate.ddl.set_comment(Category, Category._meta.fields_map.get("user").describe(False)) 138 | assert ret == 'COMMENT ON COLUMN "category"."user_id" IS \'User\'' 139 | 140 | 141 | def test_drop_column(): 142 | ret = Migrate.ddl.drop_column(Category, "name") 143 | if isinstance(Migrate.ddl, MysqlDDL): 144 | assert ret == "ALTER TABLE `category` DROP COLUMN `name`" 145 | elif isinstance(Migrate.ddl, PostgresDDL): 146 | assert ret == 'ALTER TABLE "category" DROP COLUMN "name"' 147 | 148 | 149 | def test_add_index(): 150 | index = Migrate.ddl.add_index(Category, ["name"]) 151 | index_u = Migrate.ddl.add_index(Category, ["name"], True) 152 | if isinstance(Migrate.ddl, MysqlDDL): 153 | assert index == "ALTER TABLE `category` ADD INDEX `idx_category_name_8b0cb9` (`name`)" 154 | assert ( 155 | index_u == "ALTER TABLE `category` ADD UNIQUE INDEX `uid_category_name_8b0cb9` (`name`)" 156 | ) 157 | elif isinstance(Migrate.ddl, PostgresDDL): 158 | assert index == 'CREATE INDEX "idx_category_name_8b0cb9" ON "category" ("name")' 159 | assert index_u == 'CREATE UNIQUE INDEX "uid_category_name_8b0cb9" ON "category" ("name")' 160 | else: 161 | assert index == 'ALTER TABLE "category" ADD INDEX "idx_category_name_8b0cb9" ("name")' 162 | assert ( 163 | index_u == 'ALTER TABLE "category" ADD UNIQUE INDEX "uid_category_name_8b0cb9" ("name")' 164 | ) 165 | 166 | 167 | def test_drop_index(): 168 | ret = Migrate.ddl.drop_index(Category, ["name"]) 169 | ret_u = Migrate.ddl.drop_index(Category, ["name"], True) 170 | if isinstance(Migrate.ddl, MysqlDDL): 171 | assert ret == "ALTER TABLE `category` DROP INDEX `idx_category_name_8b0cb9`" 172 | assert ret_u == "ALTER TABLE `category` DROP INDEX `uid_category_name_8b0cb9`" 173 | elif isinstance(Migrate.ddl, PostgresDDL): 174 | assert ret == 'DROP INDEX "idx_category_name_8b0cb9"' 175 | assert ret_u == 'DROP 
INDEX "uid_category_name_8b0cb9"' 176 | else: 177 | assert ret == 'ALTER TABLE "category" DROP INDEX "idx_category_name_8b0cb9"' 178 | assert ret_u == 'ALTER TABLE "category" DROP INDEX "uid_category_name_8b0cb9"' 179 | 180 | 181 | def test_add_fk(): 182 | ret = Migrate.ddl.add_fk( 183 | Category, Category._meta.fields_map.get("user").describe(False), User.describe(False) 184 | ) 185 | if isinstance(Migrate.ddl, MysqlDDL): 186 | assert ( 187 | ret 188 | == "ALTER TABLE `category` ADD CONSTRAINT `fk_category_user_e2e3874c` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE" 189 | ) 190 | else: 191 | assert ( 192 | ret 193 | == 'ALTER TABLE "category" ADD CONSTRAINT "fk_category_user_e2e3874c" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE' 194 | ) 195 | 196 | 197 | def test_drop_fk(): 198 | ret = Migrate.ddl.drop_fk( 199 | Category, Category._meta.fields_map.get("user").describe(False), User.describe(False) 200 | ) 201 | if isinstance(Migrate.ddl, MysqlDDL): 202 | assert ret == "ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_e2e3874c`" 203 | elif isinstance(Migrate.ddl, PostgresDDL): 204 | assert ret == 'ALTER TABLE "category" DROP CONSTRAINT "fk_category_user_e2e3874c"' 205 | else: 206 | assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_e2e3874c"' 207 | -------------------------------------------------------------------------------- /README_RU.md: -------------------------------------------------------------------------------- 1 | # Aerich 2 | 3 | [![image](https://img.shields.io/pypi/v/aerich.svg?style=flat)](https://pypi.python.org/pypi/aerich) 4 | [![image](https://img.shields.io/github/license/tortoise/aerich)](https://github.com/tortoise/aerich) 5 | [![image](https://github.com/tortoise/aerich/workflows/pypi/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:pypi) 6 | [![image](https://github.com/tortoise/aerich/workflows/ci/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:ci) 7 | 8 | [English](./README.md) | Русский 9 | 10 | ## Введение 11 | 12 | Aerich - это инструмент для миграции базы данных для TortoiseORM, который аналогичен Alembic для SQLAlchemy или встроенному решению миграций в Django ORM. 13 | 14 | ## Установка 15 | 16 | Просто установите из pypi: 17 | 18 | ```shell 19 | pip install aerich 20 | ``` 21 | 22 | ## Быстрый старт 23 | 24 | ```shell 25 | > aerich -h 26 | 27 | Usage: aerich [OPTIONS] COMMAND [ARGS]... 28 | 29 | Options: 30 | -V, --version Show the version and exit. 31 | -c, --config TEXT Config file. [default: pyproject.toml] 32 | --app TEXT Tortoise-ORM app name. 33 | -h, --help Show this message and exit. 34 | 35 | Commands: 36 | downgrade Downgrade to specified version. 37 | heads Show current available heads in migrate location. 38 | history List all migrate items. 39 | init Init config file and generate root migrate location. 40 | init-db Generate schema and generate app migrate location. 41 | inspectdb Introspects the database tables to standard output as... 42 | migrate Generate migrate changes file. 43 | upgrade Upgrade to specified version. 44 | ``` 45 | 46 | ## Использование 47 | 48 | Сначала вам нужно добавить aerich.models в конфигурацию вашего Tortoise-ORM. 
Пример: 49 | 50 | ```python 51 | TORTOISE_ORM = { 52 | "connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"}, 53 | "apps": { 54 | "models": { 55 | "models": ["tests.models", "aerich.models"], 56 | "default_connection": "default", 57 | }, 58 | }, 59 | } 60 | ``` 61 | 62 | ### Инициализация 63 | 64 | ```shell 65 | > aerich init -h 66 | 67 | Usage: aerich init [OPTIONS] 68 | 69 | Init config file and generate root migrate location. 70 | 71 | Options: 72 | -t, --tortoise-orm TEXT Tortoise-ORM config module dict variable, like 73 | settings.TORTOISE_ORM. [required] 74 | --location TEXT Migrate store location. [default: ./migrations] 75 | -s, --src_folder TEXT Folder of the source, relative to the project root. 76 | -h, --help Show this message and exit. 77 | ``` 78 | 79 | Инициализируйте файл конфигурации и задайте местоположение миграций: 80 | 81 | ```shell 82 | > aerich init -t tests.backends.mysql.TORTOISE_ORM 83 | 84 | Success create migrate location ./migrations 85 | Success write config to pyproject.toml 86 | ``` 87 | 88 | ### Инициализация базы данных 89 | 90 | ```shell 91 | > aerich init-db 92 | 93 | Success create app migrate location ./migrations/models 94 | Success generate schema for app "models" 95 | ``` 96 | 97 | Если ваше приложение Tortoise-ORM не является приложением по умолчанию с именем models, вы должны указать правильное имя приложения с помощью параметра --app, например: aerich --app other_models init-db. 98 | 99 | ### Обновление моделей и создание миграции 100 | 101 | ```shell 102 | > aerich migrate --name drop_column 103 | 104 | Success migrate 1_202029051520102929_drop_column.py 105 | ``` 106 | 107 | Формат имени файла миграции следующий: `{версия}_{дата_и_время}_{имя|обновление}.py`. 108 | 109 | Если aerich предполагает, что вы переименовываете столбец, он спросит: 110 | Переименовать `{старый_столбец} в {новый_столбец} [True]`. Вы можете выбрать `True`, 111 | чтобы переименовать столбец без удаления столбца, или выбрать `False`, чтобы удалить столбец, 112 | а затем создать новый. Обратите внимание, что последний вариант может привести к потере данных. 113 | 114 | 115 | ### Обновление до последней версии 116 | 117 | ```shell 118 | > aerich upgrade 119 | 120 | Success upgrade 1_202029051520102929_drop_column.py 121 | ``` 122 | 123 | Теперь ваша база данных обновлена до последней версии. 124 | 125 | ### Откат до указанной версии 126 | 127 | ```shell 128 | > aerich downgrade -h 129 | 130 | Usage: aerich downgrade [OPTIONS] 131 | 132 | Downgrade to specified version. 133 | 134 | Options: 135 | -v, --version INTEGER Specified version, default to last. [default: -1] 136 | -d, --delete Delete version files at the same time. [default: 137 | False] 138 | 139 | --yes Confirm the action without prompting. 140 | -h, --help Show this message and exit. 141 | ``` 142 | 143 | ```shell 144 | > aerich downgrade 145 | 146 | Success downgrade 1_202029051520102929_drop_column.py 147 | ``` 148 | 149 | Теперь ваша база данных откатилась до указанной версии. 150 | 151 | ### Показать историю 152 | 153 | ```shell 154 | > aerich history 155 | 156 | 1_202029051520102929_drop_column.py 157 | ``` 158 | 159 | ### Чтобы узнать, какие миграции должны быть применены, можно использовать команду: 160 | 161 | ```shell 162 | > aerich heads 163 | 164 | 1_202029051520102929_drop_column.py 165 | ``` 166 | 167 | ### Осмотр таблиц базы данных для модели TortoiseORM 168 | 169 | В настоящее время inspectdb поддерживает MySQL, Postgres и SQLite. 
170 | 171 | ```shell 172 | Usage: aerich inspectdb [OPTIONS] 173 | 174 | Introspects the database tables to standard output as TortoiseORM model. 175 | 176 | Options: 177 | -t, --table TEXT Which tables to inspect. 178 | -h, --help Show this message and exit. 179 | ``` 180 | 181 | Посмотреть все таблицы и вывести их на консоль: 182 | 183 | ```shell 184 | aerich --app models inspectdb 185 | ``` 186 | 187 | Осмотреть указанную таблицу в приложении по умолчанию и перенаправить в models.py: 188 | 189 | ```shell 190 | aerich inspectdb -t user > models.py 191 | ``` 192 | 193 | Например, ваша таблица выглядит следующим образом: 194 | 195 | ```sql 196 | CREATE TABLE `test` 197 | ( 198 | `id` int NOT NULL AUTO_INCREMENT, 199 | `decimal` decimal(10, 2) NOT NULL, 200 | `date` date DEFAULT NULL, 201 | `datetime` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, 202 | `time` time DEFAULT NULL, 203 | `float` float DEFAULT NULL, 204 | `string` varchar(200) COLLATE utf8mb4_general_ci DEFAULT NULL, 205 | `tinyint` tinyint DEFAULT NULL, 206 | PRIMARY KEY (`id`), 207 | KEY `asyncmy_string_index` (`string`) 208 | ) ENGINE = InnoDB 209 | DEFAULT CHARSET = utf8mb4 210 | COLLATE = utf8mb4_general_ci 211 | ``` 212 | 213 | Теперь выполните команду aerich inspectdb -t test, чтобы увидеть сгенерированную модель: 214 | 215 | ```python 216 | from tortoise import Model, fields 217 | 218 | 219 | class Test(Model): 220 | date = fields.DateField(null=True, ) 221 | datetime = fields.DatetimeField(auto_now=True, ) 222 | decimal = fields.DecimalField(max_digits=10, decimal_places=2, ) 223 | float = fields.FloatField(null=True, ) 224 | id = fields.IntField(pk=True, ) 225 | string = fields.CharField(max_length=200, null=True, ) 226 | time = fields.TimeField(null=True, ) 227 | tinyint = fields.BooleanField(null=True, ) 228 | ``` 229 | 230 | Обратите внимание, что эта команда имеет ограничения и не может автоматически определить некоторые поля, такие как `IntEnumField`, `ForeignKeyField` и другие. 231 | 232 | ### Несколько баз данных 233 | 234 | ```python 235 | tortoise_orm = { 236 | "connections": { 237 | "default": expand_db_url(db_url, True), 238 | "second": expand_db_url(db_url_second, True), 239 | }, 240 | "apps": { 241 | "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"}, 242 | "models_second": {"models": ["tests.models_second"], "default_connection": "second", }, 243 | }, 244 | } 245 | ``` 246 | 247 | Вам нужно указать `aerich.models` только в одном приложении и должны указывать `--app` при запуске команды `aerich migrate` и т.д. 248 | 249 | ## Восстановление рабочего процесса aerich 250 | 251 | В некоторых случаях, например, при возникновении проблем после обновления `aerich`, вы не можете запустить `aerich migrate` или `aerich upgrade`. В таком случае вы можете выполнить следующие шаги: 252 | 253 | 1. удалите таблицы `aerich`. 254 | 2. удалите директорию `migrations/{app}`. 255 | 3. rerun `aerich init-db`. 256 | 257 | Обратите внимание, что эти действия безопасны, и вы можете использовать их для сброса миграций, если у вас слишком много файлов миграции. 258 | 259 | ## Использование aerich в приложении 260 | 261 | Вы можете использовать `aerich` вне командной строки, используя класс `Command`. 
262 | 263 | ```python 264 | from aerich import Command 265 | 266 | command = Command(tortoise_config=config, app='models') 267 | await command.init() 268 | await command.migrate('test') 269 | ``` 270 | 271 | ## Лицензия 272 | 273 | Этот проект лицензирован в соответствии с лицензией 274 | [Apache-2.0](https://github.com/long2ice/aerich/blob/master/LICENSE) Лицензия. -------------------------------------------------------------------------------- /aerich/cli.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | from functools import wraps 4 | from pathlib import Path 5 | from typing import List 6 | 7 | import click 8 | import tomlkit 9 | from click import Context, UsageError 10 | from tomlkit.exceptions import NonExistentKey 11 | from tortoise import Tortoise 12 | 13 | from aerich import Command 14 | from aerich.enums import Color 15 | from aerich.exceptions import DowngradeError 16 | from aerich.utils import add_src_path, get_tortoise_config 17 | from aerich.version import __version__ 18 | 19 | CONFIG_DEFAULT_VALUES = { 20 | "src_folder": ".", 21 | } 22 | 23 | 24 | def coro(f): 25 | @wraps(f) 26 | def wrapper(*args, **kwargs): 27 | loop = asyncio.get_event_loop() 28 | 29 | # Close db connections at the end of all but the cli group function 30 | try: 31 | loop.run_until_complete(f(*args, **kwargs)) 32 | finally: 33 | if f.__name__ not in ["cli", "init"]: 34 | loop.run_until_complete(Tortoise.close_connections()) 35 | 36 | return wrapper 37 | 38 | 39 | @click.group(context_settings={"help_option_names": ["-h", "--help"]}) 40 | @click.version_option(__version__, "-V", "--version") 41 | @click.option( 42 | "-c", 43 | "--config", 44 | default="pyproject.toml", 45 | show_default=True, 46 | help="Config file.", 47 | ) 48 | @click.option("--app", required=False, help="Tortoise-ORM app name.") 49 | @click.pass_context 50 | @coro 51 | async def cli(ctx: Context, config, app): 52 | ctx.ensure_object(dict) 53 | ctx.obj["config_file"] = config 54 | 55 | invoked_subcommand = ctx.invoked_subcommand 56 | if invoked_subcommand != "init": 57 | config_path = Path(config) 58 | if not config_path.exists(): 59 | raise UsageError("You must exec init first", ctx=ctx) 60 | content = config_path.read_text("utf-8") 61 | doc = tomlkit.parse(content) 62 | try: 63 | tool = doc["tool"]["aerich"] 64 | location = tool["location"] 65 | tortoise_orm = tool["tortoise_orm"] 66 | src_folder = tool.get("src_folder", CONFIG_DEFAULT_VALUES["src_folder"]) 67 | except NonExistentKey: 68 | raise UsageError("You need run aerich init again when upgrade to 0.6.0+") 69 | add_src_path(src_folder) 70 | tortoise_config = get_tortoise_config(ctx, tortoise_orm) 71 | app = app or list(tortoise_config.get("apps").keys())[0] 72 | command = Command(tortoise_config=tortoise_config, app=app, location=location) 73 | ctx.obj["command"] = command 74 | if invoked_subcommand != "init-db": 75 | if not Path(location, app).exists(): 76 | raise UsageError("You must exec init-db first", ctx=ctx) 77 | await command.init() 78 | 79 | 80 | @cli.command(help="Generate migrate changes file.") 81 | @click.option("--name", default="update", show_default=True, help="Migrate name.") 82 | @click.pass_context 83 | @coro 84 | async def migrate(ctx: Context, name): 85 | command = ctx.obj["command"] 86 | ret = await command.migrate(name) 87 | if not ret: 88 | return click.secho("No changes detected", fg=Color.yellow) 89 | click.secho(f"Success migrate {ret}", fg=Color.green) 90 | 91 | 92 | 
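# The subcommands below (except `init`) fetch the shared Command instance that
# the `cli` group stored in ctx.obj["command"], so the group-level options
# (-c/--config and --app) apply uniformly to all of them.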
@cli.command(help="Upgrade to specified version.") 93 | @click.option( 94 | "--in-transaction", 95 | "-i", 96 | default=True, 97 | type=bool, 98 | help="Make migrations in transaction or not. Can be helpful for large migrations or creating concurrent indexes.", 99 | ) 100 | @click.pass_context 101 | @coro 102 | async def upgrade(ctx: Context, in_transaction: bool): 103 | command = ctx.obj["command"] 104 | migrated = await command.upgrade(run_in_transaction=in_transaction) 105 | if not migrated: 106 | click.secho("No upgrade items found", fg=Color.yellow) 107 | else: 108 | for version_file in migrated: 109 | click.secho(f"Success upgrade {version_file}", fg=Color.green) 110 | 111 | 112 | @cli.command(help="Downgrade to specified version.") 113 | @click.option( 114 | "-v", 115 | "--version", 116 | default=-1, 117 | type=int, 118 | show_default=True, 119 | help="Specified version, default to last.", 120 | ) 121 | @click.option( 122 | "-d", 123 | "--delete", 124 | is_flag=True, 125 | default=False, 126 | show_default=True, 127 | help="Delete version files at the same time.", 128 | ) 129 | @click.pass_context 130 | @click.confirmation_option( 131 | prompt="Downgrade is dangerous, which maybe lose your data, are you sure?", 132 | ) 133 | @coro 134 | async def downgrade(ctx: Context, version: int, delete: bool): 135 | command = ctx.obj["command"] 136 | try: 137 | files = await command.downgrade(version, delete) 138 | except DowngradeError as e: 139 | return click.secho(str(e), fg=Color.yellow) 140 | for file in files: 141 | click.secho(f"Success downgrade {file}", fg=Color.green) 142 | 143 | 144 | @cli.command(help="Show current available heads in migrate location.") 145 | @click.pass_context 146 | @coro 147 | async def heads(ctx: Context): 148 | command = ctx.obj["command"] 149 | head_list = await command.heads() 150 | if not head_list: 151 | return click.secho("No available heads, try migrate first", fg=Color.green) 152 | for version in head_list: 153 | click.secho(version, fg=Color.green) 154 | 155 | 156 | @cli.command(help="List all migrate items.") 157 | @click.pass_context 158 | @coro 159 | async def history(ctx: Context): 160 | command = ctx.obj["command"] 161 | versions = await command.history() 162 | if not versions: 163 | return click.secho("No history, try migrate", fg=Color.green) 164 | for version in versions: 165 | click.secho(version, fg=Color.green) 166 | 167 | 168 | @cli.command(help="Init config file and generate root migrate location.") 169 | @click.option( 170 | "-t", 171 | "--tortoise-orm", 172 | required=True, 173 | help="Tortoise-ORM config module dict variable, like settings.TORTOISE_ORM.", 174 | ) 175 | @click.option( 176 | "--location", 177 | default="./migrations", 178 | show_default=True, 179 | help="Migrate store location.", 180 | ) 181 | @click.option( 182 | "-s", 183 | "--src_folder", 184 | default=CONFIG_DEFAULT_VALUES["src_folder"], 185 | show_default=False, 186 | help="Folder of the source, relative to the project root.", 187 | ) 188 | @click.pass_context 189 | @coro 190 | async def init(ctx: Context, tortoise_orm, location, src_folder): 191 | config_file = ctx.obj["config_file"] 192 | 193 | if os.path.isabs(src_folder): 194 | src_folder = os.path.relpath(os.getcwd(), src_folder) 195 | # Add ./ so it's clear that this is relative path 196 | if not src_folder.startswith("./"): 197 | src_folder = "./" + src_folder 198 | 199 | # check that we can find the configuration, if not we can fail before the config file gets created 200 | add_src_path(src_folder) 201 | 
get_tortoise_config(ctx, tortoise_orm) 202 | config_path = Path(config_file) 203 | if config_path.exists(): 204 | content = config_path.read_text() 205 | doc = tomlkit.parse(content) 206 | else: 207 | doc = tomlkit.parse("[tool.aerich]") 208 | table = tomlkit.table() 209 | table["tortoise_orm"] = tortoise_orm 210 | table["location"] = location 211 | table["src_folder"] = src_folder 212 | doc["tool"]["aerich"] = table 213 | 214 | config_path.write_text(tomlkit.dumps(doc)) 215 | 216 | Path(location).mkdir(parents=True, exist_ok=True) 217 | 218 | click.secho(f"Success create migrate location {location}", fg=Color.green) 219 | click.secho(f"Success write config to {config_file}", fg=Color.green) 220 | 221 | 222 | @cli.command(help="Generate schema and generate app migrate location.") 223 | @click.option( 224 | "-s", 225 | "--safe", 226 | type=bool, 227 | is_flag=True, 228 | default=True, 229 | help="When set to true, creates the table only when it does not already exist.", 230 | show_default=True, 231 | ) 232 | @click.pass_context 233 | @coro 234 | async def init_db(ctx: Context, safe: bool): 235 | command = ctx.obj["command"] 236 | app = command.app 237 | dirname = Path(command.location, app) 238 | try: 239 | await command.init_db(safe) 240 | click.secho(f"Success create app migrate location {dirname}", fg=Color.green) 241 | click.secho(f'Success generate schema for app "{app}"', fg=Color.green) 242 | except FileExistsError: 243 | return click.secho( 244 | f"Inited {app} already, or delete {dirname} and try again.", fg=Color.yellow 245 | ) 246 | finally: 247 | if dirname.is_dir() and not any(dirname.iterdir()): 248 | dirname.rmdir() 249 | 250 | 251 | @cli.command(help="Introspects the database tables to standard output as TortoiseORM model.") 252 | @click.option( 253 | "-t", 254 | "--table", 255 | help="Which tables to inspect.", 256 | multiple=True, 257 | required=False, 258 | ) 259 | @click.pass_context 260 | @coro 261 | async def inspectdb(ctx: Context, table: List[str]): 262 | command = ctx.obj["command"] 263 | ret = await command.inspectdb(table) 264 | click.secho(ret) 265 | 266 | 267 | def main(): 268 | cli() 269 | 270 | 271 | if __name__ == "__main__": 272 | main() 273 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2020 long2ice 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /aerich/ddl/__init__.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from typing import List, Type 3 | 4 | from tortoise import BaseDBAsyncClient, Model 5 | from tortoise.backends.base.schema_generator import BaseSchemaGenerator 6 | 7 | from aerich.utils import is_default_function 8 | 9 | 10 | class BaseDDL: 11 | schema_generator_cls: Type[BaseSchemaGenerator] = BaseSchemaGenerator 12 | DIALECT = "sql" 13 | _DROP_TABLE_TEMPLATE = 'DROP TABLE IF EXISTS "{table_name}"' 14 | _ADD_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" ADD {column}' 15 | _DROP_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" DROP COLUMN "{column_name}"' 16 | _ALTER_DEFAULT_TEMPLATE = 'ALTER TABLE "{table_name}" ALTER COLUMN "{column}" {default}' 17 | _RENAME_COLUMN_TEMPLATE = ( 18 | 'ALTER TABLE "{table_name}" RENAME COLUMN "{old_column_name}" TO "{new_column_name}"' 19 | ) 20 | _ADD_INDEX_TEMPLATE = ( 21 | 'ALTER TABLE "{table_name}" ADD {unique}INDEX "{index_name}" ({column_names})' 22 | ) 23 | _DROP_INDEX_TEMPLATE = 'ALTER TABLE "{table_name}" DROP INDEX "{index_name}"' 24 | _ADD_FK_TEMPLATE = 'ALTER TABLE "{table_name}" ADD CONSTRAINT "{fk_name}" FOREIGN KEY ("{db_column}") REFERENCES "{table}" ("{field}") ON DELETE {on_delete}' 25 | _DROP_FK_TEMPLATE = 'ALTER TABLE "{table_name}" DROP FOREIGN KEY "{fk_name}"' 26 | _M2M_TABLE_TEMPLATE = ( 27 | 'CREATE TABLE "{table_name}" (\n' 28 | ' "{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,\n' 29 | ' "{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE {on_delete}\n' 30 | "){extra}{comment}" 31 | ) 32 | _MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" MODIFY COLUMN {column}' 33 | _CHANGE_COLUMN_TEMPLATE = ( 34 | 'ALTER TABLE "{table_name}" CHANGE {old_column_name} {new_column_name} {new_column_type}' 35 | ) 36 | _RENAME_TABLE_TEMPLATE = 'ALTER TABLE "{old_table_name}" RENAME TO "{new_table_name}"' 37 | 38 | def __init__(self, client: "BaseDBAsyncClient"): 39 | self.client = client 40 | self.schema_generator = self.schema_generator_cls(client) 41 | 42 | def create_table(self, model: "Type[Model]"): 43 | return self.schema_generator._get_table_sql(model, True)["table_creation_string"].rstrip( 44 | ";" 45 | ) 46 | 47 | def drop_table(self, table_name: str): 48 | return self._DROP_TABLE_TEMPLATE.format(table_name=table_name) 49 | 50 | def create_m2m( 51 | self, model: "Type[Model]", field_describe: dict, reference_table_describe: dict 52 | ): 53 | through = field_describe.get("through") 54 | description = field_describe.get("description") 55 | reference_id = reference_table_describe.get("pk_field").get("db_column") 56 | db_field_types = reference_table_describe.get("pk_field").get("db_field_types") 57 | return self._M2M_TABLE_TEMPLATE.format( 58 | table_name=through, 59 | backward_table=model._meta.db_table, 60 | forward_table=reference_table_describe.get("table"), 61 | backward_field=model._meta.db_pk_column, 62 | forward_field=reference_id, 63 | backward_key=field_describe.get("backward_key"), 64 | backward_type=model._meta.pk.get_for_dialect(self.DIALECT, "SQL_TYPE"), 65 | forward_key=field_describe.get("forward_key"), 66 | forward_type=db_field_types.get(self.DIALECT) or db_field_types.get(""), 67 | on_delete=field_describe.get("on_delete"), 68 | extra=self.schema_generator._table_generate_extra(table=through), 69 
| comment=self.schema_generator._table_comment_generator( 70 | table=through, comment=description 71 | ) 72 | if description 73 | else "", 74 | ) 75 | 76 | def drop_m2m(self, table_name: str): 77 | return self._DROP_TABLE_TEMPLATE.format(table_name=table_name) 78 | 79 | def _get_default(self, model: "Type[Model]", field_describe: dict): 80 | db_table = model._meta.db_table 81 | default = field_describe.get("default") 82 | if isinstance(default, Enum): 83 | default = default.value 84 | db_column = field_describe.get("db_column") 85 | auto_now_add = field_describe.get("auto_now_add", False) 86 | auto_now = field_describe.get("auto_now", False) 87 | if default is not None or auto_now_add: 88 | if field_describe.get("field_type") in [ 89 | "UUIDField", 90 | "TextField", 91 | "JSONField", 92 | ] or is_default_function(default): 93 | default = "" 94 | else: 95 | try: 96 | default = self.schema_generator._column_default_generator( 97 | db_table, 98 | db_column, 99 | self.schema_generator._escape_default_value(default), 100 | auto_now_add, 101 | auto_now, 102 | ) 103 | except NotImplementedError: 104 | default = "" 105 | else: 106 | default = None 107 | return default 108 | 109 | def add_column(self, model: "Type[Model]", field_describe: dict, is_pk: bool = False): 110 | db_table = model._meta.db_table 111 | description = field_describe.get("description") 112 | db_column = field_describe.get("db_column") 113 | db_field_types = field_describe.get("db_field_types") 114 | default = self._get_default(model, field_describe) 115 | if default is None: 116 | default = "" 117 | return self._ADD_COLUMN_TEMPLATE.format( 118 | table_name=db_table, 119 | column=self.schema_generator._create_string( 120 | db_column=db_column, 121 | field_type=db_field_types.get(self.DIALECT, db_field_types.get("")), 122 | nullable="NOT NULL" if not field_describe.get("nullable") else "", 123 | unique="UNIQUE" if field_describe.get("unique") else "", 124 | comment=self.schema_generator._column_comment_generator( 125 | table=db_table, 126 | column=db_column, 127 | comment=field_describe.get("description"), 128 | ) 129 | if description 130 | else "", 131 | is_primary_key=is_pk, 132 | default=default, 133 | ), 134 | ) 135 | 136 | def drop_column(self, model: "Type[Model]", column_name: str): 137 | return self._DROP_COLUMN_TEMPLATE.format( 138 | table_name=model._meta.db_table, column_name=column_name 139 | ) 140 | 141 | def modify_column(self, model: "Type[Model]", field_describe: dict, is_pk: bool = False): 142 | db_table = model._meta.db_table 143 | db_field_types = field_describe.get("db_field_types") 144 | default = self._get_default(model, field_describe) 145 | if default is None: 146 | default = "" 147 | return self._MODIFY_COLUMN_TEMPLATE.format( 148 | table_name=db_table, 149 | column=self.schema_generator._create_string( 150 | db_column=field_describe.get("db_column"), 151 | field_type=db_field_types.get(self.DIALECT) or db_field_types.get(""), 152 | nullable="NOT NULL" if not field_describe.get("nullable") else "", 153 | unique="", 154 | comment=self.schema_generator._column_comment_generator( 155 | table=db_table, 156 | column=field_describe.get("db_column"), 157 | comment=field_describe.get("description"), 158 | ) 159 | if field_describe.get("description") 160 | else "", 161 | is_primary_key=is_pk, 162 | default=default, 163 | ), 164 | ) 165 | 166 | def rename_column(self, model: "Type[Model]", old_column_name: str, new_column_name: str): 167 | return self._RENAME_COLUMN_TEMPLATE.format( 168 | 
table_name=model._meta.db_table, 169 | old_column_name=old_column_name, 170 | new_column_name=new_column_name, 171 | ) 172 | 173 | def change_column( 174 | self, model: "Type[Model]", old_column_name: str, new_column_name: str, new_column_type: str 175 | ): 176 | return self._CHANGE_COLUMN_TEMPLATE.format( 177 | table_name=model._meta.db_table, 178 | old_column_name=old_column_name, 179 | new_column_name=new_column_name, 180 | new_column_type=new_column_type, 181 | ) 182 | 183 | def add_index(self, model: "Type[Model]", field_names: List[str], unique=False): 184 | return self._ADD_INDEX_TEMPLATE.format( 185 | unique="UNIQUE " if unique else "", 186 | index_name=self.schema_generator._generate_index_name( 187 | "idx" if not unique else "uid", model, field_names 188 | ), 189 | table_name=model._meta.db_table, 190 | column_names=", ".join(self.schema_generator.quote(f) for f in field_names), 191 | ) 192 | 193 | def drop_index(self, model: "Type[Model]", field_names: List[str], unique=False): 194 | return self._DROP_INDEX_TEMPLATE.format( 195 | index_name=self.schema_generator._generate_index_name( 196 | "idx" if not unique else "uid", model, field_names 197 | ), 198 | table_name=model._meta.db_table, 199 | ) 200 | 201 | def drop_index_by_name(self, model: "Type[Model]", index_name: str): 202 | return self._DROP_INDEX_TEMPLATE.format( 203 | index_name=index_name, 204 | table_name=model._meta.db_table, 205 | ) 206 | 207 | def add_fk(self, model: "Type[Model]", field_describe: dict, reference_table_describe: dict): 208 | db_table = model._meta.db_table 209 | 210 | db_column = field_describe.get("raw_field") 211 | reference_id = reference_table_describe.get("pk_field").get("db_column") 212 | fk_name = self.schema_generator._generate_fk_name( 213 | from_table=db_table, 214 | from_field=db_column, 215 | to_table=reference_table_describe.get("table"), 216 | to_field=reference_table_describe.get("pk_field").get("db_column"), 217 | ) 218 | return self._ADD_FK_TEMPLATE.format( 219 | table_name=db_table, 220 | fk_name=fk_name, 221 | db_column=db_column, 222 | table=reference_table_describe.get("table"), 223 | field=reference_id, 224 | on_delete=field_describe.get("on_delete"), 225 | ) 226 | 227 | def drop_fk(self, model: "Type[Model]", field_describe: dict, reference_table_describe: dict): 228 | db_table = model._meta.db_table 229 | return self._DROP_FK_TEMPLATE.format( 230 | table_name=db_table, 231 | fk_name=self.schema_generator._generate_fk_name( 232 | from_table=db_table, 233 | from_field=field_describe.get("raw_field"), 234 | to_table=reference_table_describe.get("table"), 235 | to_field=reference_table_describe.get("pk_field").get("db_column"), 236 | ), 237 | ) 238 | 239 | def alter_column_default(self, model: "Type[Model]", field_describe: dict): 240 | db_table = model._meta.db_table 241 | default = self._get_default(model, field_describe) 242 | return self._ALTER_DEFAULT_TEMPLATE.format( 243 | table_name=db_table, 244 | column=field_describe.get("db_column"), 245 | default="SET" + default if default is not None else "DROP DEFAULT", 246 | ) 247 | 248 | def alter_column_null(self, model: "Type[Model]", field_describe: dict): 249 | return self.modify_column(model, field_describe) 250 | 251 | def set_comment(self, model: "Type[Model]", field_describe: dict): 252 | return self.modify_column(model, field_describe) 253 | 254 | def rename_table(self, model: "Type[Model]", old_table_name: str, new_table_name: str): 255 | db_table = model._meta.db_table 256 | return self._RENAME_TABLE_TEMPLATE.format( 
257 | table_name=db_table, old_table_name=old_table_name, new_table_name=new_table_name 258 | ) 259 | -------------------------------------------------------------------------------- /aerich/migrate.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | import os 3 | from datetime import datetime 4 | from hashlib import md5 5 | from pathlib import Path 6 | from typing import Dict, List, Optional, Tuple, Type, Union 7 | 8 | import click 9 | from dictdiffer import diff 10 | from tortoise import BaseDBAsyncClient, Model, Tortoise 11 | from tortoise.exceptions import OperationalError 12 | from tortoise.indexes import Index 13 | 14 | from aerich.ddl import BaseDDL 15 | from aerich.models import MAX_VERSION_LENGTH, Aerich 16 | from aerich.utils import get_app_connection, get_models_describe, is_default_function 17 | 18 | MIGRATE_TEMPLATE = """from tortoise import BaseDBAsyncClient 19 | 20 | 21 | async def upgrade(db: BaseDBAsyncClient) -> str: 22 | return \"\"\" 23 | {upgrade_sql}\"\"\" 24 | 25 | 26 | async def downgrade(db: BaseDBAsyncClient) -> str: 27 | return \"\"\" 28 | {downgrade_sql}\"\"\" 29 | """ 30 | 31 | 32 | class Migrate: 33 | upgrade_operators: List[str] = [] 34 | downgrade_operators: List[str] = [] 35 | _upgrade_fk_m2m_index_operators: List[str] = [] 36 | _downgrade_fk_m2m_index_operators: List[str] = [] 37 | _upgrade_m2m: List[str] = [] 38 | _downgrade_m2m: List[str] = [] 39 | _aerich = Aerich.__name__ 40 | _rename_old = [] 41 | _rename_new = [] 42 | 43 | ddl: BaseDDL 44 | _last_version_content: Optional[dict] = None 45 | app: str 46 | migrate_location: Path 47 | dialect: str 48 | _db_version: Optional[str] = None 49 | 50 | @classmethod 51 | def get_all_version_files(cls) -> List[str]: 52 | return sorted( 53 | filter(lambda x: x.endswith("py"), os.listdir(cls.migrate_location)), 54 | key=lambda x: int(x.split("_")[0]), 55 | ) 56 | 57 | @classmethod 58 | def _get_model(cls, model: str) -> Type[Model]: 59 | return Tortoise.apps.get(cls.app).get(model) 60 | 61 | @classmethod 62 | async def get_last_version(cls) -> Optional[Aerich]: 63 | try: 64 | return await Aerich.filter(app=cls.app).first() 65 | except OperationalError: 66 | pass 67 | 68 | @classmethod 69 | async def _get_db_version(cls, connection: BaseDBAsyncClient): 70 | if cls.dialect == "mysql": 71 | sql = "select version() as version" 72 | ret = await connection.execute_query(sql) 73 | cls._db_version = ret[1][0].get("version") 74 | 75 | @classmethod 76 | async def load_ddl_class(cls): 77 | ddl_dialect_module = importlib.import_module(f"aerich.ddl.{cls.dialect}") 78 | return getattr(ddl_dialect_module, f"{cls.dialect.capitalize()}DDL") 79 | 80 | @classmethod 81 | async def init(cls, config: dict, app: str, location: str): 82 | await Tortoise.init(config=config) 83 | last_version = await cls.get_last_version() 84 | cls.app = app 85 | cls.migrate_location = Path(location, app) 86 | if last_version: 87 | cls._last_version_content = last_version.content 88 | 89 | connection = get_app_connection(config, app) 90 | cls.dialect = connection.schema_generator.DIALECT 91 | cls.ddl_class = await cls.load_ddl_class() 92 | cls.ddl = cls.ddl_class(connection) 93 | await cls._get_db_version(connection) 94 | 95 | @classmethod 96 | async def _get_last_version_num(cls): 97 | last_version = await cls.get_last_version() 98 | if not last_version: 99 | return None 100 | version = last_version.version 101 | return int(version.split("_", 1)[0]) 102 | 103 | @classmethod 104 | async def 
generate_version(cls, name=None): 105 | now = datetime.now().strftime("%Y%m%d%H%M%S").replace("/", "") 106 | last_version_num = await cls._get_last_version_num() 107 | if last_version_num is None: 108 | return f"0_{now}_init.py" 109 | version = f"{last_version_num + 1}_{now}_{name}.py" 110 | if len(version) > MAX_VERSION_LENGTH: 111 | raise ValueError(f"Version name exceeds maximum length ({MAX_VERSION_LENGTH})") 112 | return version 113 | 114 | @classmethod 115 | async def _generate_diff_py(cls, name): 116 | version = await cls.generate_version(name) 117 | # delete an existing file with the same version number, if any 118 | for version_file in cls.get_all_version_files(): 119 | if version_file.startswith(version.split("_")[0]): 120 | os.unlink(Path(cls.migrate_location, version_file)) 121 | 122 | version_file = Path(cls.migrate_location, version) 123 | content = MIGRATE_TEMPLATE.format( 124 | upgrade_sql=";\n ".join(cls.upgrade_operators) + ";", 125 | downgrade_sql=";\n ".join(cls.downgrade_operators) + ";", 126 | ) 127 | 128 | with open(version_file, "w", encoding="utf-8") as f: 129 | f.write(content) 130 | return version 131 | 132 | @classmethod 133 | async def migrate(cls, name) -> str: 134 | """ 135 | Diff the old and new model descriptions and generate the migration content. 136 | :param name: 137 | :return: 138 | """ 139 | new_version_content = get_models_describe(cls.app) 140 | cls.diff_models(cls._last_version_content, new_version_content) 141 | cls.diff_models(new_version_content, cls._last_version_content, False) 142 | 143 | cls._merge_operators() 144 | 145 | if not cls.upgrade_operators: 146 | return "" 147 | 148 | return await cls._generate_diff_py(name) 149 | 150 | @classmethod 151 | def _add_operator(cls, operator: str, upgrade=True, fk_m2m_index=False): 152 | """ 153 | Add an operator; fk/m2m/index operators are tracked separately because their order matters. 154 | :param operator: 155 | :param upgrade: 156 | :param fk_m2m_index: 157 | :return: 158 | """ 159 | operator = operator.rstrip(";") 160 | if upgrade: 161 | if fk_m2m_index: 162 | cls._upgrade_fk_m2m_index_operators.append(operator) 163 | else: 164 | cls.upgrade_operators.append(operator) 165 | else: 166 | if fk_m2m_index: 167 | cls._downgrade_fk_m2m_index_operators.append(operator) 168 | else: 169 | cls.downgrade_operators.append(operator) 170 | 171 | @classmethod 172 | def _handle_indexes(cls, model: Type[Model], indexes: List[Union[Tuple[str], Index]]): 173 | ret = [] 174 | for index in indexes: 175 | if isinstance(index, Index): 176 | index.__hash__ = lambda self: md5( # nosec: B303 177 | self.index_name(cls.ddl.schema_generator, model).encode() 178 | + self.__class__.__name__.encode() 179 | ).hexdigest() 180 | ret.append(index) 181 | return ret 182 | 183 | @classmethod 184 | def diff_models(cls, old_models: Dict[str, dict], new_models: Dict[str, dict], upgrade=True): 185 | """ 186 | Diff two sets of model descriptions and collect the resulting migration operators. 187 | :param old_models: 188 | :param new_models: 189 | :param upgrade: 190 | :return: 191 | """ 192 | _aerich = f"{cls.app}.{cls._aerich}" 193 | old_models.pop(_aerich, None) 194 | new_models.pop(_aerich, None) 195 | 196 | for new_model_str, new_model_describe in new_models.items(): 197 | model = cls._get_model(new_model_describe.get("name").split(".")[1]) 198 | 199 | if new_model_str not in old_models.keys(): 200 | if upgrade: 201 | cls._add_operator(cls.add_model(model), upgrade) 202 | else: 203 | # the origin model can't be found when downgrading, so skip 204 | pass 205 | else: 206 | old_model_describe = old_models.get(new_model_str) 207 | # rename table 208 | new_table = 
new_model_describe.get("table") 209 | old_table = old_model_describe.get("table") 210 | if new_table != old_table: 211 | cls._add_operator(cls.rename_table(model, old_table, new_table), upgrade) 212 | old_unique_together = set( 213 | map(lambda x: tuple(x), old_model_describe.get("unique_together")) 214 | ) 215 | new_unique_together = set( 216 | map(lambda x: tuple(x), new_model_describe.get("unique_together")) 217 | ) 218 | old_indexes = set( 219 | map( 220 | lambda x: x if isinstance(x, Index) else tuple(x), 221 | cls._handle_indexes(model, old_model_describe.get("indexes", [])), 222 | ) 223 | ) 224 | new_indexes = set( 225 | map( 226 | lambda x: x if isinstance(x, Index) else tuple(x), 227 | cls._handle_indexes(model, new_model_describe.get("indexes", [])), 228 | ) 229 | ) 230 | old_pk_field = old_model_describe.get("pk_field") 231 | new_pk_field = new_model_describe.get("pk_field") 232 | # pk field 233 | changes = diff(old_pk_field, new_pk_field) 234 | for action, option, change in changes: 235 | # current only support rename pk 236 | if action == "change" and option == "name": 237 | cls._add_operator(cls._rename_field(model, *change), upgrade) 238 | # m2m fields 239 | old_m2m_fields = old_model_describe.get("m2m_fields") 240 | new_m2m_fields = sorted( 241 | new_model_describe.get("m2m_fields"), 242 | key=lambda field: old_m2m_fields.index(field) 243 | if field in old_m2m_fields 244 | else len(old_m2m_fields), 245 | ) 246 | for action, option, change in diff(old_m2m_fields, new_m2m_fields): 247 | if change[0][0] == "db_constraint": 248 | continue 249 | table = change[0][1].get("through") 250 | if action == "add": 251 | add = False 252 | if upgrade and table not in cls._upgrade_m2m: 253 | cls._upgrade_m2m.append(table) 254 | add = True 255 | elif not upgrade and table not in cls._downgrade_m2m: 256 | cls._downgrade_m2m.append(table) 257 | add = True 258 | if add: 259 | cls._add_operator( 260 | cls.create_m2m( 261 | model, 262 | change[0][1], 263 | new_models.get(change[0][1].get("model_name")), 264 | ), 265 | upgrade, 266 | fk_m2m_index=True, 267 | ) 268 | elif action == "remove": 269 | add = False 270 | if upgrade and table not in cls._upgrade_m2m: 271 | cls._upgrade_m2m.append(table) 272 | add = True 273 | elif not upgrade and table not in cls._downgrade_m2m: 274 | cls._downgrade_m2m.append(table) 275 | add = True 276 | if add: 277 | cls._add_operator(cls.drop_m2m(table), upgrade, True) 278 | # add unique_together 279 | for index in new_unique_together.difference(old_unique_together): 280 | cls._add_operator(cls._add_index(model, index, True), upgrade, True) 281 | # remove unique_together 282 | for index in old_unique_together.difference(new_unique_together): 283 | cls._add_operator(cls._drop_index(model, index, True), upgrade, True) 284 | # add indexes 285 | for index in new_indexes.difference(old_indexes): 286 | cls._add_operator(cls._add_index(model, index, False), upgrade, True) 287 | # remove indexes 288 | for index in old_indexes.difference(new_indexes): 289 | cls._add_operator(cls._drop_index(model, index, False), upgrade, True) 290 | old_data_fields = list( 291 | filter( 292 | lambda x: x.get("db_field_types") is not None, 293 | old_model_describe.get("data_fields"), 294 | ) 295 | ) 296 | new_data_fields = list( 297 | filter( 298 | lambda x: x.get("db_field_types") is not None, 299 | new_model_describe.get("data_fields"), 300 | ) 301 | ) 302 | 303 | old_data_fields_name = list(map(lambda x: x.get("name"), old_data_fields)) 304 | new_data_fields_name = list(map(lambda x: 
x.get("name"), new_data_fields)) 305 | 306 | # add fields or rename fields 307 | for new_data_field_name in set(new_data_fields_name).difference( 308 | set(old_data_fields_name) 309 | ): 310 | new_data_field = next( 311 | filter(lambda x: x.get("name") == new_data_field_name, new_data_fields) 312 | ) 313 | is_rename = False 314 | for old_data_field in old_data_fields: 315 | changes = list(diff(old_data_field, new_data_field)) 316 | old_data_field_name = old_data_field.get("name") 317 | if len(changes) == 2: 318 | # rename field 319 | if ( 320 | changes[0] 321 | == ( 322 | "change", 323 | "name", 324 | (old_data_field_name, new_data_field_name), 325 | ) 326 | and changes[1] 327 | == ( 328 | "change", 329 | "db_column", 330 | ( 331 | old_data_field.get("db_column"), 332 | new_data_field.get("db_column"), 333 | ), 334 | ) 335 | and old_data_field_name not in new_data_fields_name 336 | ): 337 | if upgrade: 338 | is_rename = click.prompt( 339 | f"Rename {old_data_field_name} to {new_data_field_name}?", 340 | default=True, 341 | type=bool, 342 | show_choices=True, 343 | ) 344 | else: 345 | is_rename = old_data_field_name in cls._rename_new 346 | if is_rename: 347 | cls._rename_new.append(new_data_field_name) 348 | cls._rename_old.append(old_data_field_name) 349 | # only MySQL8+ has rename syntax 350 | if ( 351 | cls.dialect == "mysql" 352 | and cls._db_version 353 | and cls._db_version.startswith("5.") 354 | ): 355 | cls._add_operator( 356 | cls._change_field( 357 | model, old_data_field, new_data_field 358 | ), 359 | upgrade, 360 | ) 361 | else: 362 | cls._add_operator( 363 | cls._rename_field(model, *changes[1][2]), 364 | upgrade, 365 | ) 366 | if not is_rename: 367 | cls._add_operator( 368 | cls._add_field( 369 | model, 370 | new_data_field, 371 | ), 372 | upgrade, 373 | ) 374 | if new_data_field["indexed"]: 375 | cls._add_operator( 376 | cls._add_index( 377 | model, {new_data_field["db_column"]}, new_data_field["unique"] 378 | ), 379 | upgrade, 380 | True, 381 | ) 382 | # remove fields 383 | for old_data_field_name in set(old_data_fields_name).difference( 384 | set(new_data_fields_name) 385 | ): 386 | # don't remove field if is renamed 387 | if (upgrade and old_data_field_name in cls._rename_old) or ( 388 | not upgrade and old_data_field_name in cls._rename_new 389 | ): 390 | continue 391 | old_data_field = next( 392 | filter(lambda x: x.get("name") == old_data_field_name, old_data_fields) 393 | ) 394 | db_column = old_data_field["db_column"] 395 | cls._add_operator( 396 | cls._remove_field( 397 | model, 398 | db_column, 399 | ), 400 | upgrade, 401 | ) 402 | if old_data_field["indexed"]: 403 | cls._add_operator( 404 | cls._drop_index( 405 | model, 406 | {db_column}, 407 | ), 408 | upgrade, 409 | True, 410 | ) 411 | 412 | old_fk_fields = old_model_describe.get("fk_fields") 413 | new_fk_fields = new_model_describe.get("fk_fields") 414 | 415 | old_fk_fields_name = list(map(lambda x: x.get("name"), old_fk_fields)) 416 | new_fk_fields_name = list(map(lambda x: x.get("name"), new_fk_fields)) 417 | 418 | # add fk 419 | for new_fk_field_name in set(new_fk_fields_name).difference( 420 | set(old_fk_fields_name) 421 | ): 422 | fk_field = next( 423 | filter(lambda x: x.get("name") == new_fk_field_name, new_fk_fields) 424 | ) 425 | if fk_field.get("db_constraint"): 426 | cls._add_operator( 427 | cls._add_fk( 428 | model, fk_field, new_models.get(fk_field.get("python_type")) 429 | ), 430 | upgrade, 431 | fk_m2m_index=True, 432 | ) 433 | # drop fk 434 | for old_fk_field_name in 
set(old_fk_fields_name).difference( 435 | set(new_fk_fields_name) 436 | ): 437 | old_fk_field = next( 438 | filter(lambda x: x.get("name") == old_fk_field_name, old_fk_fields) 439 | ) 440 | if old_fk_field.get("db_constraint"): 441 | cls._add_operator( 442 | cls._drop_fk( 443 | model, old_fk_field, old_models.get(old_fk_field.get("python_type")) 444 | ), 445 | upgrade, 446 | fk_m2m_index=True, 447 | ) 448 | # change fields 449 | for field_name in set(new_data_fields_name).intersection(set(old_data_fields_name)): 450 | old_data_field = next( 451 | filter(lambda x: x.get("name") == field_name, old_data_fields) 452 | ) 453 | new_data_field = next( 454 | filter(lambda x: x.get("name") == field_name, new_data_fields) 455 | ) 456 | changes = diff(old_data_field, new_data_field) 457 | modified = False 458 | for change in changes: 459 | _, option, old_new = change 460 | if option == "indexed": 461 | # change index 462 | unique = new_data_field.get("unique") 463 | if old_new[0] is False and old_new[1] is True: 464 | cls._add_operator( 465 | cls._add_index(model, (field_name,), unique), upgrade, True 466 | ) 467 | else: 468 | cls._add_operator( 469 | cls._drop_index(model, (field_name,), unique), upgrade, True 470 | ) 471 | elif option == "db_field_types.": 472 | if new_data_field.get("field_type") == "DecimalField": 473 | # modify column 474 | cls._add_operator( 475 | cls._modify_field(model, new_data_field), 476 | upgrade, 477 | ) 478 | else: 479 | continue 480 | elif option == "default": 481 | if not ( 482 | is_default_function(old_new[0]) or is_default_function(old_new[1]) 483 | ): 484 | # change column default 485 | cls._add_operator( 486 | cls._alter_default(model, new_data_field), upgrade 487 | ) 488 | elif option == "unique": 489 | # because indexed include it 490 | continue 491 | elif option == "nullable": 492 | # change nullable 493 | cls._add_operator(cls._alter_null(model, new_data_field), upgrade) 494 | else: 495 | if modified: 496 | continue 497 | # modify column 498 | cls._add_operator( 499 | cls._modify_field(model, new_data_field), 500 | upgrade, 501 | ) 502 | modified = True 503 | 504 | for old_model in old_models: 505 | if old_model not in new_models.keys(): 506 | cls._add_operator(cls.drop_model(old_models.get(old_model).get("table")), upgrade) 507 | 508 | @classmethod 509 | def rename_table(cls, model: Type[Model], old_table_name: str, new_table_name: str): 510 | return cls.ddl.rename_table(model, old_table_name, new_table_name) 511 | 512 | @classmethod 513 | def add_model(cls, model: Type[Model]): 514 | return cls.ddl.create_table(model) 515 | 516 | @classmethod 517 | def drop_model(cls, table_name: str): 518 | return cls.ddl.drop_table(table_name) 519 | 520 | @classmethod 521 | def create_m2m(cls, model: Type[Model], field_describe: dict, reference_table_describe: dict): 522 | return cls.ddl.create_m2m(model, field_describe, reference_table_describe) 523 | 524 | @classmethod 525 | def drop_m2m(cls, table_name: str): 526 | return cls.ddl.drop_m2m(table_name) 527 | 528 | @classmethod 529 | def _resolve_fk_fields_name(cls, model: Type[Model], fields_name: Tuple[str]): 530 | ret = [] 531 | for field_name in fields_name: 532 | field = model._meta.fields_map[field_name] 533 | if field.source_field: 534 | ret.append(field.source_field) 535 | elif field_name in model._meta.fk_fields: 536 | ret.append(field_name + "_id") 537 | else: 538 | ret.append(field_name) 539 | return ret 540 | 541 | @classmethod 542 | def _drop_index(cls, model: Type[Model], fields_name: Union[Tuple[str], 
Index], unique=False): 543 | if isinstance(fields_name, Index): 544 | return cls.ddl.drop_index_by_name( 545 | model, fields_name.index_name(cls.ddl.schema_generator, model) 546 | ) 547 | fields_name = cls._resolve_fk_fields_name(model, fields_name) 548 | return cls.ddl.drop_index(model, fields_name, unique) 549 | 550 | @classmethod 551 | def _add_index(cls, model: Type[Model], fields_name: Union[Tuple[str], Index], unique=False): 552 | if isinstance(fields_name, Index): 553 | return fields_name.get_sql(cls.ddl.schema_generator, model, False) 554 | fields_name = cls._resolve_fk_fields_name(model, fields_name) 555 | return cls.ddl.add_index(model, fields_name, unique) 556 | 557 | @classmethod 558 | def _add_field(cls, model: Type[Model], field_describe: dict, is_pk: bool = False): 559 | return cls.ddl.add_column(model, field_describe, is_pk) 560 | 561 | @classmethod 562 | def _alter_default(cls, model: Type[Model], field_describe: dict): 563 | return cls.ddl.alter_column_default(model, field_describe) 564 | 565 | @classmethod 566 | def _alter_null(cls, model: Type[Model], field_describe: dict): 567 | return cls.ddl.alter_column_null(model, field_describe) 568 | 569 | @classmethod 570 | def _set_comment(cls, model: Type[Model], field_describe: dict): 571 | return cls.ddl.set_comment(model, field_describe) 572 | 573 | @classmethod 574 | def _modify_field(cls, model: Type[Model], field_describe: dict): 575 | return cls.ddl.modify_column(model, field_describe) 576 | 577 | @classmethod 578 | def _drop_fk(cls, model: Type[Model], field_describe: dict, reference_table_describe: dict): 579 | return cls.ddl.drop_fk(model, field_describe, reference_table_describe) 580 | 581 | @classmethod 582 | def _remove_field(cls, model: Type[Model], column_name: str): 583 | return cls.ddl.drop_column(model, column_name) 584 | 585 | @classmethod 586 | def _rename_field(cls, model: Type[Model], old_field_name: str, new_field_name: str): 587 | return cls.ddl.rename_column(model, old_field_name, new_field_name) 588 | 589 | @classmethod 590 | def _change_field(cls, model: Type[Model], old_field_describe: dict, new_field_describe: dict): 591 | db_field_types = new_field_describe.get("db_field_types") 592 | return cls.ddl.change_column( 593 | model, 594 | old_field_describe.get("db_column"), 595 | new_field_describe.get("db_column"), 596 | db_field_types.get(cls.dialect) or db_field_types.get(""), 597 | ) 598 | 599 | @classmethod 600 | def _add_fk(cls, model: Type[Model], field_describe: dict, reference_table_describe: dict): 601 | """ 602 | Add a foreign key constraint referencing another table. 603 | :param model: 604 | :param field_describe: 605 | :param reference_table_describe: 606 | :return: 607 | """ 608 | return cls.ddl.add_fk(model, field_describe, reference_table_describe) 609 | 610 | @classmethod 611 | def _merge_operators(cls): 612 | """ 613 | fk/m2m/index operators must come last when adding and first when dropping. 614 | :return: 615 | """ 616 | for _upgrade_fk_m2m_operator in cls._upgrade_fk_m2m_index_operators: 617 | if "ADD" in _upgrade_fk_m2m_operator or "CREATE" in _upgrade_fk_m2m_operator: 618 | cls.upgrade_operators.append(_upgrade_fk_m2m_operator) 619 | else: 620 | cls.upgrade_operators.insert(0, _upgrade_fk_m2m_operator) 621 | 622 | for _downgrade_fk_m2m_operator in cls._downgrade_fk_m2m_index_operators: 623 | if "ADD" in _downgrade_fk_m2m_operator or "CREATE" in _downgrade_fk_m2m_operator: 624 | cls.downgrade_operators.append(_downgrade_fk_m2m_operator) 625 | else: 626 | cls.downgrade_operators.insert(0, _downgrade_fk_m2m_operator) 627 | 
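# A minimal sketch (illustrative, not part of the module above) of what
# _merge_operators does with the collected FK/M2M/index DDL: statements
# containing ADD or CREATE are appended so they run after the regular
# table/column changes, while everything else (DROPs) is prepended so it runs
# first. The operator strings below are hypothetical.
if __name__ == "__main__":
    Migrate.upgrade_operators = ["ALTER TABLE `email` ADD `address` VARCHAR(200) NOT NULL"]
    Migrate._upgrade_fk_m2m_index_operators = [
        "ALTER TABLE `config` ADD CONSTRAINT `fk_config_user` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`)",
        "ALTER TABLE `config` DROP FOREIGN KEY `fk_config_old`",
    ]
    Migrate._merge_operators()
    # The DROP now comes first and the ADD CONSTRAINT last:
    # ['ALTER TABLE `config` DROP FOREIGN KEY `fk_config_old`',
    #  'ALTER TABLE `email` ADD `address` VARCHAR(200) NOT NULL',
    #  'ALTER TABLE `config` ADD CONSTRAINT `fk_config_user` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`)']
    print(Migrate.upgrade_operators)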
-------------------------------------------------------------------------------- /tests/test_migrate.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pytest_mock import MockerFixture 3 | 4 | from aerich.ddl.mysql import MysqlDDL 5 | from aerich.ddl.postgres import PostgresDDL 6 | from aerich.ddl.sqlite import SqliteDDL 7 | from aerich.exceptions import NotSupportError 8 | from aerich.migrate import Migrate 9 | from aerich.utils import get_models_describe 10 | 11 | old_models_describe = { 12 | "models.Category": { 13 | "name": "models.Category", 14 | "app": "models", 15 | "table": "category", 16 | "abstract": False, 17 | "description": None, 18 | "docstring": None, 19 | "unique_together": [], 20 | "indexes": [], 21 | "pk_field": { 22 | "name": "id", 23 | "field_type": "IntField", 24 | "db_column": "id", 25 | "python_type": "int", 26 | "generated": True, 27 | "nullable": False, 28 | "unique": True, 29 | "indexed": True, 30 | "default": None, 31 | "description": None, 32 | "docstring": None, 33 | "constraints": {"ge": 1, "le": 2147483647}, 34 | "db_field_types": {"": "INT"}, 35 | }, 36 | "data_fields": [ 37 | { 38 | "name": "slug", 39 | "field_type": "CharField", 40 | "db_column": "slug", 41 | "python_type": "str", 42 | "generated": False, 43 | "nullable": False, 44 | "unique": False, 45 | "indexed": False, 46 | "default": None, 47 | "description": None, 48 | "docstring": None, 49 | "constraints": {"max_length": 200}, 50 | "db_field_types": {"": "VARCHAR(200)"}, 51 | }, 52 | { 53 | "name": "name", 54 | "field_type": "CharField", 55 | "db_column": "name", 56 | "python_type": "str", 57 | "generated": False, 58 | "nullable": False, 59 | "unique": False, 60 | "indexed": False, 61 | "default": None, 62 | "description": None, 63 | "docstring": None, 64 | "constraints": {"max_length": 200}, 65 | "db_field_types": {"": "VARCHAR(200)"}, 66 | }, 67 | { 68 | "name": "created_at", 69 | "field_type": "DatetimeField", 70 | "db_column": "created_at", 71 | "python_type": "datetime.datetime", 72 | "generated": False, 73 | "nullable": False, 74 | "unique": False, 75 | "indexed": False, 76 | "default": None, 77 | "description": None, 78 | "docstring": None, 79 | "constraints": {"readOnly": True}, 80 | "db_field_types": { 81 | "": "TIMESTAMP", 82 | "mysql": "DATETIME(6)", 83 | "postgres": "TIMESTAMPTZ", 84 | }, 85 | "auto_now_add": True, 86 | "auto_now": False, 87 | }, 88 | { 89 | "name": "user_id", 90 | "field_type": "IntField", 91 | "db_column": "user_id", 92 | "python_type": "int", 93 | "generated": False, 94 | "nullable": False, 95 | "unique": False, 96 | "indexed": False, 97 | "default": None, 98 | "description": "User", 99 | "docstring": None, 100 | "constraints": {"ge": 1, "le": 2147483647}, 101 | "db_field_types": {"": "INT"}, 102 | }, 103 | ], 104 | "fk_fields": [ 105 | { 106 | "name": "user", 107 | "field_type": "ForeignKeyFieldInstance", 108 | "python_type": "models.User", 109 | "generated": False, 110 | "nullable": False, 111 | "unique": False, 112 | "indexed": False, 113 | "default": None, 114 | "description": "User", 115 | "docstring": None, 116 | "constraints": {}, 117 | "raw_field": "user_id", 118 | "on_delete": "CASCADE", 119 | } 120 | ], 121 | "backward_fk_fields": [], 122 | "o2o_fields": [], 123 | "backward_o2o_fields": [], 124 | "m2m_fields": [ 125 | { 126 | "name": "products", 127 | "field_type": "ManyToManyFieldInstance", 128 | "python_type": "models.Product", 129 | "generated": False, 130 | "nullable": False, 131 | "unique": 
False, 132 | "indexed": False, 133 | "default": None, 134 | "description": None, 135 | "docstring": None, 136 | "constraints": {}, 137 | "model_name": "models.Product", 138 | "related_name": "categories", 139 | "forward_key": "product_id", 140 | "backward_key": "category_id", 141 | "through": "product_category", 142 | "on_delete": "CASCADE", 143 | "_generated": True, 144 | } 145 | ], 146 | }, 147 | "models.Config": { 148 | "name": "models.Config", 149 | "app": "models", 150 | "table": "configs", 151 | "abstract": False, 152 | "description": None, 153 | "docstring": None, 154 | "unique_together": [], 155 | "indexes": [], 156 | "pk_field": { 157 | "name": "id", 158 | "field_type": "IntField", 159 | "db_column": "id", 160 | "python_type": "int", 161 | "generated": True, 162 | "nullable": False, 163 | "unique": True, 164 | "indexed": True, 165 | "default": None, 166 | "description": None, 167 | "docstring": None, 168 | "constraints": {"ge": 1, "le": 2147483647}, 169 | "db_field_types": {"": "INT"}, 170 | }, 171 | "data_fields": [ 172 | { 173 | "name": "label", 174 | "field_type": "CharField", 175 | "db_column": "label", 176 | "python_type": "str", 177 | "generated": False, 178 | "nullable": False, 179 | "unique": False, 180 | "indexed": False, 181 | "default": None, 182 | "description": None, 183 | "docstring": None, 184 | "constraints": {"max_length": 200}, 185 | "db_field_types": {"": "VARCHAR(200)"}, 186 | }, 187 | { 188 | "name": "key", 189 | "field_type": "CharField", 190 | "db_column": "key", 191 | "python_type": "str", 192 | "generated": False, 193 | "nullable": False, 194 | "unique": False, 195 | "indexed": False, 196 | "default": None, 197 | "description": None, 198 | "docstring": None, 199 | "constraints": {"max_length": 20}, 200 | "db_field_types": {"": "VARCHAR(20)"}, 201 | }, 202 | { 203 | "name": "value", 204 | "field_type": "JSONField", 205 | "db_column": "value", 206 | "python_type": "Union[dict, list]", 207 | "generated": False, 208 | "nullable": False, 209 | "unique": False, 210 | "indexed": False, 211 | "default": None, 212 | "description": None, 213 | "docstring": None, 214 | "constraints": {}, 215 | "db_field_types": {"": "TEXT", "postgres": "JSONB"}, 216 | }, 217 | { 218 | "name": "status", 219 | "field_type": "IntEnumFieldInstance", 220 | "db_column": "status", 221 | "python_type": "int", 222 | "generated": False, 223 | "nullable": False, 224 | "unique": False, 225 | "indexed": False, 226 | "default": 1, 227 | "description": "on: 1\noff: 0", 228 | "docstring": None, 229 | "constraints": {"ge": -32768, "le": 32767}, 230 | "db_field_types": {"": "SMALLINT"}, 231 | }, 232 | ], 233 | "fk_fields": [], 234 | "backward_fk_fields": [], 235 | "o2o_fields": [], 236 | "backward_o2o_fields": [], 237 | "m2m_fields": [], 238 | }, 239 | "models.Email": { 240 | "name": "models.Email", 241 | "app": "models", 242 | "table": "email", 243 | "abstract": False, 244 | "description": None, 245 | "docstring": None, 246 | "unique_together": [], 247 | "indexes": [], 248 | "pk_field": { 249 | "name": "id", 250 | "field_type": "IntField", 251 | "db_column": "id", 252 | "python_type": "int", 253 | "generated": True, 254 | "nullable": False, 255 | "unique": True, 256 | "indexed": True, 257 | "default": None, 258 | "description": None, 259 | "docstring": None, 260 | "constraints": {"ge": 1, "le": 2147483647}, 261 | "db_field_types": {"": "INT"}, 262 | }, 263 | "data_fields": [ 264 | { 265 | "name": "email", 266 | "field_type": "CharField", 267 | "db_column": "email", 268 | "python_type": "str", 269 | 
"generated": False, 270 | "nullable": False, 271 | "unique": False, 272 | "indexed": False, 273 | "default": None, 274 | "description": None, 275 | "docstring": None, 276 | "constraints": {"max_length": 200}, 277 | "db_field_types": {"": "VARCHAR(200)"}, 278 | }, 279 | { 280 | "name": "is_primary", 281 | "field_type": "BooleanField", 282 | "db_column": "is_primary", 283 | "python_type": "bool", 284 | "generated": False, 285 | "nullable": False, 286 | "unique": False, 287 | "indexed": False, 288 | "default": False, 289 | "description": None, 290 | "docstring": None, 291 | "constraints": {}, 292 | "db_field_types": {"": "BOOL", "sqlite": "INT"}, 293 | }, 294 | { 295 | "name": "user_id", 296 | "field_type": "IntField", 297 | "db_column": "user_id", 298 | "python_type": "int", 299 | "generated": False, 300 | "nullable": False, 301 | "unique": False, 302 | "indexed": False, 303 | "default": None, 304 | "description": None, 305 | "docstring": None, 306 | "constraints": {"ge": 1, "le": 2147483647}, 307 | "db_field_types": {"": "INT"}, 308 | }, 309 | ], 310 | "fk_fields": [ 311 | { 312 | "name": "user", 313 | "field_type": "ForeignKeyFieldInstance", 314 | "python_type": "models.User", 315 | "generated": False, 316 | "nullable": False, 317 | "unique": False, 318 | "indexed": False, 319 | "default": None, 320 | "description": None, 321 | "docstring": None, 322 | "constraints": {}, 323 | "raw_field": "user_id", 324 | "on_delete": "CASCADE", 325 | } 326 | ], 327 | "backward_fk_fields": [], 328 | "o2o_fields": [], 329 | "backward_o2o_fields": [], 330 | "m2m_fields": [], 331 | }, 332 | "models.Product": { 333 | "name": "models.Product", 334 | "app": "models", 335 | "table": "product", 336 | "abstract": False, 337 | "description": None, 338 | "docstring": None, 339 | "unique_together": [], 340 | "indexes": [], 341 | "pk_field": { 342 | "name": "id", 343 | "field_type": "IntField", 344 | "db_column": "id", 345 | "python_type": "int", 346 | "generated": True, 347 | "nullable": False, 348 | "unique": True, 349 | "indexed": True, 350 | "default": None, 351 | "description": None, 352 | "docstring": None, 353 | "constraints": {"ge": 1, "le": 2147483647}, 354 | "db_field_types": {"": "INT"}, 355 | }, 356 | "data_fields": [ 357 | { 358 | "name": "name", 359 | "field_type": "CharField", 360 | "db_column": "name", 361 | "python_type": "str", 362 | "generated": False, 363 | "nullable": False, 364 | "unique": False, 365 | "indexed": False, 366 | "default": None, 367 | "description": None, 368 | "docstring": None, 369 | "constraints": {"max_length": 50}, 370 | "db_field_types": {"": "VARCHAR(50)"}, 371 | }, 372 | { 373 | "name": "view_num", 374 | "field_type": "IntField", 375 | "db_column": "view_num", 376 | "python_type": "int", 377 | "generated": False, 378 | "nullable": False, 379 | "unique": False, 380 | "indexed": False, 381 | "default": None, 382 | "description": "View Num", 383 | "docstring": None, 384 | "constraints": {"ge": -2147483648, "le": 2147483647}, 385 | "db_field_types": {"": "INT"}, 386 | }, 387 | { 388 | "name": "sort", 389 | "field_type": "IntField", 390 | "db_column": "sort", 391 | "python_type": "int", 392 | "generated": False, 393 | "nullable": False, 394 | "unique": False, 395 | "indexed": False, 396 | "default": None, 397 | "description": None, 398 | "docstring": None, 399 | "constraints": {"ge": -2147483648, "le": 2147483647}, 400 | "db_field_types": {"": "INT"}, 401 | }, 402 | { 403 | "name": "is_reviewed", 404 | "field_type": "BooleanField", 405 | "db_column": "is_reviewed", 406 | 
"python_type": "bool", 407 | "generated": False, 408 | "nullable": False, 409 | "unique": False, 410 | "indexed": False, 411 | "default": None, 412 | "description": "Is Reviewed", 413 | "docstring": None, 414 | "constraints": {}, 415 | "db_field_types": {"": "BOOL", "sqlite": "INT"}, 416 | }, 417 | { 418 | "name": "type", 419 | "field_type": "IntEnumFieldInstance", 420 | "db_column": "type_db_alias", 421 | "python_type": "int", 422 | "generated": False, 423 | "nullable": False, 424 | "unique": False, 425 | "indexed": False, 426 | "default": None, 427 | "description": "Product Type", 428 | "docstring": None, 429 | "constraints": {"ge": -32768, "le": 32767}, 430 | "db_field_types": {"": "SMALLINT"}, 431 | }, 432 | { 433 | "name": "image", 434 | "field_type": "CharField", 435 | "db_column": "image", 436 | "python_type": "str", 437 | "generated": False, 438 | "nullable": False, 439 | "unique": False, 440 | "indexed": False, 441 | "default": None, 442 | "description": None, 443 | "docstring": None, 444 | "constraints": {"max_length": 200}, 445 | "db_field_types": {"": "VARCHAR(200)"}, 446 | }, 447 | { 448 | "name": "body", 449 | "field_type": "TextField", 450 | "db_column": "body", 451 | "python_type": "str", 452 | "generated": False, 453 | "nullable": False, 454 | "unique": False, 455 | "indexed": False, 456 | "default": None, 457 | "description": None, 458 | "docstring": None, 459 | "constraints": {}, 460 | "db_field_types": {"": "TEXT", "mysql": "LONGTEXT"}, 461 | }, 462 | { 463 | "name": "created_at", 464 | "field_type": "DatetimeField", 465 | "db_column": "created_at", 466 | "python_type": "datetime.datetime", 467 | "generated": False, 468 | "nullable": False, 469 | "unique": False, 470 | "indexed": False, 471 | "default": None, 472 | "description": None, 473 | "docstring": None, 474 | "constraints": {"readOnly": True}, 475 | "db_field_types": { 476 | "": "TIMESTAMP", 477 | "mysql": "DATETIME(6)", 478 | "postgres": "TIMESTAMPTZ", 479 | }, 480 | "auto_now_add": True, 481 | "auto_now": False, 482 | }, 483 | ], 484 | "fk_fields": [], 485 | "backward_fk_fields": [], 486 | "o2o_fields": [], 487 | "backward_o2o_fields": [], 488 | "m2m_fields": [ 489 | { 490 | "name": "categories", 491 | "field_type": "ManyToManyFieldInstance", 492 | "python_type": "models.Category", 493 | "generated": False, 494 | "nullable": False, 495 | "unique": False, 496 | "indexed": False, 497 | "default": None, 498 | "description": None, 499 | "docstring": None, 500 | "constraints": {}, 501 | "model_name": "models.Category", 502 | "related_name": "products", 503 | "forward_key": "category_id", 504 | "backward_key": "product_id", 505 | "through": "product_category", 506 | "on_delete": "CASCADE", 507 | "_generated": False, 508 | } 509 | ], 510 | }, 511 | "models.User": { 512 | "name": "models.User", 513 | "app": "models", 514 | "table": "user", 515 | "abstract": False, 516 | "description": None, 517 | "docstring": None, 518 | "unique_together": [], 519 | "indexes": [], 520 | "pk_field": { 521 | "name": "id", 522 | "field_type": "IntField", 523 | "db_column": "id", 524 | "python_type": "int", 525 | "generated": True, 526 | "nullable": False, 527 | "unique": True, 528 | "indexed": True, 529 | "default": None, 530 | "description": None, 531 | "docstring": None, 532 | "constraints": {"ge": 1, "le": 2147483647}, 533 | "db_field_types": {"": "INT"}, 534 | }, 535 | "data_fields": [ 536 | { 537 | "name": "username", 538 | "field_type": "CharField", 539 | "db_column": "username", 540 | "python_type": "str", 541 | "generated": False, 
542 | "nullable": False, 543 | "unique": False, 544 | "indexed": False, 545 | "default": None, 546 | "description": None, 547 | "docstring": None, 548 | "constraints": {"max_length": 20}, 549 | "db_field_types": {"": "VARCHAR(20)"}, 550 | }, 551 | { 552 | "name": "password", 553 | "field_type": "CharField", 554 | "db_column": "password", 555 | "python_type": "str", 556 | "generated": False, 557 | "nullable": False, 558 | "unique": False, 559 | "indexed": False, 560 | "default": None, 561 | "description": None, 562 | "docstring": None, 563 | "constraints": {"max_length": 200}, 564 | "db_field_types": {"": "VARCHAR(200)"}, 565 | }, 566 | { 567 | "name": "last_login", 568 | "field_type": "DatetimeField", 569 | "db_column": "last_login", 570 | "python_type": "datetime.datetime", 571 | "generated": False, 572 | "nullable": False, 573 | "unique": False, 574 | "indexed": False, 575 | "default": "", 576 | "description": "Last Login", 577 | "docstring": None, 578 | "constraints": {}, 579 | "db_field_types": { 580 | "": "TIMESTAMP", 581 | "mysql": "DATETIME(6)", 582 | "postgres": "TIMESTAMPTZ", 583 | }, 584 | "auto_now_add": False, 585 | "auto_now": False, 586 | }, 587 | { 588 | "name": "is_active", 589 | "field_type": "BooleanField", 590 | "db_column": "is_active", 591 | "python_type": "bool", 592 | "generated": False, 593 | "nullable": False, 594 | "unique": False, 595 | "indexed": False, 596 | "default": True, 597 | "description": "Is Active", 598 | "docstring": None, 599 | "constraints": {}, 600 | "db_field_types": {"": "BOOL", "sqlite": "INT"}, 601 | }, 602 | { 603 | "name": "is_superuser", 604 | "field_type": "BooleanField", 605 | "db_column": "is_superuser", 606 | "python_type": "bool", 607 | "generated": False, 608 | "nullable": False, 609 | "unique": False, 610 | "indexed": False, 611 | "default": False, 612 | "description": "Is SuperUser", 613 | "docstring": None, 614 | "constraints": {}, 615 | "db_field_types": {"": "BOOL", "sqlite": "INT"}, 616 | }, 617 | { 618 | "name": "avatar", 619 | "field_type": "CharField", 620 | "db_column": "avatar", 621 | "python_type": "str", 622 | "generated": False, 623 | "nullable": False, 624 | "unique": False, 625 | "indexed": False, 626 | "default": "", 627 | "description": None, 628 | "docstring": None, 629 | "constraints": {"max_length": 200}, 630 | "db_field_types": {"": "VARCHAR(200)"}, 631 | }, 632 | { 633 | "name": "intro", 634 | "field_type": "TextField", 635 | "db_column": "intro", 636 | "python_type": "str", 637 | "generated": False, 638 | "nullable": False, 639 | "unique": False, 640 | "indexed": False, 641 | "default": "", 642 | "description": None, 643 | "docstring": None, 644 | "constraints": {}, 645 | "db_field_types": {"": "TEXT", "mysql": "LONGTEXT"}, 646 | }, 647 | { 648 | "name": "longitude", 649 | "unique": False, 650 | "default": None, 651 | "indexed": False, 652 | "nullable": False, 653 | "db_column": "longitude", 654 | "docstring": None, 655 | "generated": False, 656 | "field_type": "DecimalField", 657 | "constraints": {}, 658 | "description": None, 659 | "python_type": "decimal.Decimal", 660 | "db_field_types": {"": "DECIMAL(12,9)", "sqlite": "VARCHAR(40)"}, 661 | }, 662 | ], 663 | "fk_fields": [], 664 | "backward_fk_fields": [ 665 | { 666 | "name": "categorys", 667 | "field_type": "BackwardFKRelation", 668 | "python_type": "models.Category", 669 | "generated": False, 670 | "nullable": False, 671 | "unique": False, 672 | "indexed": False, 673 | "default": None, 674 | "description": "User", 675 | "docstring": None, 676 | 
"constraints": {}, 677 | }, 678 | { 679 | "name": "emails", 680 | "field_type": "BackwardFKRelation", 681 | "python_type": "models.Email", 682 | "generated": False, 683 | "nullable": False, 684 | "unique": False, 685 | "indexed": False, 686 | "default": None, 687 | "description": None, 688 | "docstring": None, 689 | "constraints": {}, 690 | }, 691 | ], 692 | "o2o_fields": [], 693 | "backward_o2o_fields": [], 694 | "m2m_fields": [], 695 | }, 696 | "models.Aerich": { 697 | "name": "models.Aerich", 698 | "app": "models", 699 | "table": "aerich", 700 | "abstract": False, 701 | "description": None, 702 | "docstring": None, 703 | "unique_together": [], 704 | "indexes": [], 705 | "pk_field": { 706 | "name": "id", 707 | "field_type": "IntField", 708 | "db_column": "id", 709 | "python_type": "int", 710 | "generated": True, 711 | "nullable": False, 712 | "unique": True, 713 | "indexed": True, 714 | "default": None, 715 | "description": None, 716 | "docstring": None, 717 | "constraints": {"ge": 1, "le": 2147483647}, 718 | "db_field_types": {"": "INT"}, 719 | }, 720 | "data_fields": [ 721 | { 722 | "name": "version", 723 | "field_type": "CharField", 724 | "db_column": "version", 725 | "python_type": "str", 726 | "generated": False, 727 | "nullable": False, 728 | "unique": False, 729 | "indexed": False, 730 | "default": None, 731 | "description": None, 732 | "docstring": None, 733 | "constraints": {"max_length": 255}, 734 | "db_field_types": {"": "VARCHAR(255)"}, 735 | }, 736 | { 737 | "name": "app", 738 | "field_type": "CharField", 739 | "db_column": "app", 740 | "python_type": "str", 741 | "generated": False, 742 | "nullable": False, 743 | "unique": False, 744 | "indexed": False, 745 | "default": None, 746 | "description": None, 747 | "docstring": None, 748 | "constraints": {"max_length": 20}, 749 | "db_field_types": {"": "VARCHAR(20)"}, 750 | }, 751 | { 752 | "name": "content", 753 | "field_type": "JSONField", 754 | "db_column": "content", 755 | "python_type": "Union[dict, list]", 756 | "generated": False, 757 | "nullable": False, 758 | "unique": False, 759 | "indexed": False, 760 | "default": None, 761 | "description": None, 762 | "docstring": None, 763 | "constraints": {}, 764 | "db_field_types": {"": "TEXT", "postgres": "JSONB"}, 765 | }, 766 | ], 767 | "fk_fields": [], 768 | "backward_fk_fields": [], 769 | "o2o_fields": [], 770 | "backward_o2o_fields": [], 771 | "m2m_fields": [], 772 | }, 773 | } 774 | 775 | 776 | def test_migrate(mocker: MockerFixture): 777 | """ 778 | models.py diff with old_models.py 779 | - change email pk: id -> email_id 780 | - add field: Email.address 781 | - add fk: Config.user 782 | - drop fk: Email.user 783 | - drop field: User.avatar 784 | - add index: Email.email 785 | - add many to many: Email.users 786 | - remove unique: User.username 787 | - change column: length User.password 788 | - add unique_together: (name,type) of Product 789 | - alter default: Config.status 790 | - rename column: Product.image -> Product.pic 791 | """ 792 | mocker.patch("click.prompt", side_effect=(True,)) 793 | 794 | models_describe = get_models_describe("models") 795 | Migrate.app = "models" 796 | if isinstance(Migrate.ddl, SqliteDDL): 797 | with pytest.raises(NotSupportError): 798 | Migrate.diff_models(old_models_describe, models_describe) 799 | Migrate.diff_models(models_describe, old_models_describe, False) 800 | else: 801 | Migrate.diff_models(old_models_describe, models_describe) 802 | Migrate.diff_models(models_describe, old_models_describe, False) 803 | Migrate._merge_operators() 
804 | if isinstance(Migrate.ddl, MysqlDDL): 805 | expected_upgrade_operators = { 806 | "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)", 807 | "ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(100) NOT NULL", 808 | "ALTER TABLE `config` ADD `user_id` INT NOT NULL COMMENT 'User'", 809 | "ALTER TABLE `config` ADD CONSTRAINT `fk_config_user_17daa970` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE", 810 | "ALTER TABLE `config` ALTER COLUMN `status` DROP DEFAULT", 811 | "ALTER TABLE `config` MODIFY COLUMN `value` JSON NOT NULL", 812 | "ALTER TABLE `email` ADD `address` VARCHAR(200) NOT NULL", 813 | "ALTER TABLE `email` DROP COLUMN `user_id`", 814 | "ALTER TABLE `configs` RENAME TO `config`", 815 | "ALTER TABLE `product` RENAME COLUMN `image` TO `pic`", 816 | "ALTER TABLE `email` RENAME COLUMN `id` TO `email_id`", 817 | "ALTER TABLE `product` ADD INDEX `idx_product_name_869427` (`name`, `type_db_alias`)", 818 | "ALTER TABLE `email` ADD INDEX `idx_email_email_4a1a33` (`email`)", 819 | "ALTER TABLE `product` ADD UNIQUE INDEX `uid_product_name_869427` (`name`, `type_db_alias`)", 820 | "ALTER TABLE `product` ALTER COLUMN `view_num` SET DEFAULT 0", 821 | "ALTER TABLE `product` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)", 822 | "ALTER TABLE `product` MODIFY COLUMN `is_reviewed` BOOL NOT NULL COMMENT 'Is Reviewed'", 823 | "ALTER TABLE `user` DROP COLUMN `avatar`", 824 | "ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(100) NOT NULL", 825 | "ALTER TABLE `user` MODIFY COLUMN `intro` LONGTEXT NOT NULL", 826 | "ALTER TABLE `user` MODIFY COLUMN `last_login` DATETIME(6) NOT NULL COMMENT 'Last Login'", 827 | "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1", 828 | "ALTER TABLE `user` MODIFY COLUMN `is_superuser` BOOL NOT NULL COMMENT 'Is SuperUser' DEFAULT 0", 829 | "ALTER TABLE `user` MODIFY COLUMN `longitude` DECIMAL(10,8) NOT NULL", 830 | "ALTER TABLE `user` ADD UNIQUE INDEX `uid_user_usernam_9987ab` (`username`)", 831 | "CREATE TABLE `email_user` (\n `email_id` INT NOT NULL REFERENCES `email` (`email_id`) ON DELETE CASCADE,\n `user_id` INT NOT NULL REFERENCES `user` (`id`) ON DELETE CASCADE\n) CHARACTER SET utf8mb4", 832 | "CREATE TABLE IF NOT EXISTS `newmodel` (\n `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT,\n `name` VARCHAR(50) NOT NULL\n) CHARACTER SET utf8mb4", 833 | "ALTER TABLE `category` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)", 834 | "ALTER TABLE `product` MODIFY COLUMN `body` LONGTEXT NOT NULL", 835 | "ALTER TABLE `email` MODIFY COLUMN `is_primary` BOOL NOT NULL DEFAULT 0", 836 | } 837 | expected_downgrade_operators = { 838 | "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200) NOT NULL", 839 | "ALTER TABLE `category` MODIFY COLUMN `slug` VARCHAR(200) NOT NULL", 840 | "ALTER TABLE `config` DROP COLUMN `user_id`", 841 | "ALTER TABLE `config` DROP FOREIGN KEY `fk_config_user_17daa970`", 842 | "ALTER TABLE `config` ALTER COLUMN `status` SET DEFAULT 1", 843 | "ALTER TABLE `email` ADD `user_id` INT NOT NULL", 844 | "ALTER TABLE `email` DROP COLUMN `address`", 845 | "ALTER TABLE `config` RENAME TO `configs`", 846 | "ALTER TABLE `product` RENAME COLUMN `pic` TO `image`", 847 | "ALTER TABLE `email` RENAME COLUMN `email_id` TO `id`", 848 | "ALTER TABLE `product` DROP INDEX `idx_product_name_869427`", 849 | "ALTER TABLE `email` DROP INDEX `idx_email_email_4a1a33`", 850 | "ALTER TABLE `product` DROP INDEX `uid_product_name_869427`", 851 | "ALTER TABLE 
`product` ALTER COLUMN `view_num` DROP DEFAULT", 852 | "ALTER TABLE `user` ADD `avatar` VARCHAR(200) NOT NULL DEFAULT ''", 853 | "ALTER TABLE `user` DROP INDEX `idx_user_usernam_9987ab`", 854 | "ALTER TABLE `user` MODIFY COLUMN `password` VARCHAR(200) NOT NULL", 855 | "DROP TABLE IF EXISTS `email_user`", 856 | "DROP TABLE IF EXISTS `newmodel`", 857 | "ALTER TABLE `user` MODIFY COLUMN `intro` LONGTEXT NOT NULL", 858 | "ALTER TABLE `config` MODIFY COLUMN `value` TEXT NOT NULL", 859 | "ALTER TABLE `category` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)", 860 | "ALTER TABLE `product` MODIFY COLUMN `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6)", 861 | "ALTER TABLE `product` MODIFY COLUMN `is_reviewed` BOOL NOT NULL COMMENT 'Is Reviewed'", 862 | "ALTER TABLE `user` MODIFY COLUMN `last_login` DATETIME(6) NOT NULL COMMENT 'Last Login'", 863 | "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1", 864 | "ALTER TABLE `user` MODIFY COLUMN `is_superuser` BOOL NOT NULL COMMENT 'Is SuperUser' DEFAULT 0", 865 | "ALTER TABLE `user` MODIFY COLUMN `longitude` DECIMAL(12,9) NOT NULL", 866 | "ALTER TABLE `product` MODIFY COLUMN `body` LONGTEXT NOT NULL", 867 | "ALTER TABLE `email` MODIFY COLUMN `is_primary` BOOL NOT NULL DEFAULT 0", 868 | } 869 | assert not set(Migrate.upgrade_operators).symmetric_difference(expected_upgrade_operators) 870 | 871 | assert not set(Migrate.downgrade_operators).symmetric_difference( 872 | expected_downgrade_operators 873 | ) 874 | 875 | elif isinstance(Migrate.ddl, PostgresDDL): 876 | expected_upgrade_operators = { 877 | 'ALTER TABLE "category" ALTER COLUMN "name" DROP NOT NULL', 878 | 'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(100) USING "slug"::VARCHAR(100)', 879 | 'ALTER TABLE "category" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ', 880 | 'ALTER TABLE "config" ADD "user_id" INT NOT NULL', 881 | 'ALTER TABLE "config" ADD CONSTRAINT "fk_config_user_17daa970" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON DELETE CASCADE', 882 | 'ALTER TABLE "config" ALTER COLUMN "status" DROP DEFAULT', 883 | 'ALTER TABLE "config" ALTER COLUMN "value" TYPE JSONB USING "value"::JSONB', 884 | 'ALTER TABLE "configs" RENAME TO "config"', 885 | 'ALTER TABLE "email" ADD "address" VARCHAR(200) NOT NULL', 886 | 'ALTER TABLE "email" DROP COLUMN "user_id"', 887 | 'ALTER TABLE "email" RENAME COLUMN "id" TO "email_id"', 888 | 'ALTER TABLE "email" ALTER COLUMN "is_primary" TYPE BOOL USING "is_primary"::BOOL', 889 | 'ALTER TABLE "product" ALTER COLUMN "view_num" SET DEFAULT 0', 890 | 'ALTER TABLE "product" RENAME COLUMN "image" TO "pic"', 891 | 'ALTER TABLE "product" ALTER COLUMN "is_reviewed" TYPE BOOL USING "is_reviewed"::BOOL', 892 | 'ALTER TABLE "product" ALTER COLUMN "body" TYPE TEXT USING "body"::TEXT', 893 | 'ALTER TABLE "product" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ', 894 | 'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(100) USING "password"::VARCHAR(100)', 895 | 'ALTER TABLE "user" DROP COLUMN "avatar"', 896 | 'ALTER TABLE "user" ALTER COLUMN "is_superuser" TYPE BOOL USING "is_superuser"::BOOL', 897 | 'ALTER TABLE "user" ALTER COLUMN "last_login" TYPE TIMESTAMPTZ USING "last_login"::TIMESTAMPTZ', 898 | 'ALTER TABLE "user" ALTER COLUMN "intro" TYPE TEXT USING "intro"::TEXT', 899 | 'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL', 900 | 'ALTER TABLE "user" ALTER COLUMN "longitude" TYPE 
DECIMAL(10,8) USING "longitude"::DECIMAL(10,8)', 901 | 'CREATE INDEX "idx_product_name_869427" ON "product" ("name", "type_db_alias")', 902 | 'CREATE INDEX "idx_email_email_4a1a33" ON "email" ("email")', 903 | 'CREATE TABLE "email_user" (\n "email_id" INT NOT NULL REFERENCES "email" ("email_id") ON DELETE CASCADE,\n "user_id" INT NOT NULL REFERENCES "user" ("id") ON DELETE CASCADE\n)', 904 | 'CREATE TABLE IF NOT EXISTS "newmodel" (\n "id" SERIAL NOT NULL PRIMARY KEY,\n "name" VARCHAR(50) NOT NULL\n);\nCOMMENT ON COLUMN "config"."user_id" IS \'User\'', 905 | 'CREATE UNIQUE INDEX "uid_product_name_869427" ON "product" ("name", "type_db_alias")', 906 | 'CREATE UNIQUE INDEX "uid_user_usernam_9987ab" ON "user" ("username")', 907 | } 908 | expected_downgrade_operators = { 909 | 'ALTER TABLE "category" ALTER COLUMN "name" SET NOT NULL', 910 | 'ALTER TABLE "category" ALTER COLUMN "slug" TYPE VARCHAR(200) USING "slug"::VARCHAR(200)', 911 | 'ALTER TABLE "category" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ', 912 | 'ALTER TABLE "config" ALTER COLUMN "status" SET DEFAULT 1', 913 | 'ALTER TABLE "config" DROP COLUMN "user_id"', 914 | 'ALTER TABLE "config" DROP CONSTRAINT "fk_config_user_17daa970"', 915 | 'ALTER TABLE "config" RENAME TO "configs"', 916 | 'ALTER TABLE "config" ALTER COLUMN "value" TYPE JSONB USING "value"::JSONB', 917 | 'ALTER TABLE "email" ADD "user_id" INT NOT NULL', 918 | 'ALTER TABLE "email" DROP COLUMN "address"', 919 | 'ALTER TABLE "email" RENAME COLUMN "email_id" TO "id"', 920 | 'ALTER TABLE "email" ALTER COLUMN "is_primary" TYPE BOOL USING "is_primary"::BOOL', 921 | 'ALTER TABLE "product" ALTER COLUMN "view_num" DROP DEFAULT', 922 | 'ALTER TABLE "product" RENAME COLUMN "pic" TO "image"', 923 | 'ALTER TABLE "user" ADD "avatar" VARCHAR(200) NOT NULL DEFAULT \'\'', 924 | 'ALTER TABLE "user" ALTER COLUMN "password" TYPE VARCHAR(200) USING "password"::VARCHAR(200)', 925 | 'ALTER TABLE "user" ALTER COLUMN "last_login" TYPE TIMESTAMPTZ USING "last_login"::TIMESTAMPTZ', 926 | 'ALTER TABLE "user" ALTER COLUMN "is_superuser" TYPE BOOL USING "is_superuser"::BOOL', 927 | 'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL', 928 | 'ALTER TABLE "user" ALTER COLUMN "intro" TYPE TEXT USING "intro"::TEXT', 929 | 'ALTER TABLE "user" ALTER COLUMN "longitude" TYPE DECIMAL(12,9) USING "longitude"::DECIMAL(12,9)', 930 | 'ALTER TABLE "product" ALTER COLUMN "created_at" TYPE TIMESTAMPTZ USING "created_at"::TIMESTAMPTZ', 931 | 'ALTER TABLE "product" ALTER COLUMN "is_reviewed" TYPE BOOL USING "is_reviewed"::BOOL', 932 | 'ALTER TABLE "product" ALTER COLUMN "body" TYPE TEXT USING "body"::TEXT', 933 | 'DROP INDEX "idx_product_name_869427"', 934 | 'DROP INDEX "idx_email_email_4a1a33"', 935 | 'DROP INDEX "idx_user_usernam_9987ab"', 936 | 'DROP INDEX "uid_product_name_869427"', 937 | 'DROP TABLE IF EXISTS "email_user"', 938 | 'DROP TABLE IF EXISTS "newmodel"', 939 | } 940 | assert not set(Migrate.upgrade_operators).symmetric_difference(expected_upgrade_operators) 941 | assert not set(Migrate.downgrade_operators).symmetric_difference( 942 | expected_downgrade_operators 943 | ) 944 | 945 | elif isinstance(Migrate.ddl, SqliteDDL): 946 | assert Migrate.upgrade_operators == [] 947 | assert Migrate.downgrade_operators == [] 948 | 949 | 950 | def test_sort_all_version_files(mocker): 951 | mocker.patch( 952 | "os.listdir", 953 | return_value=[ 954 | "1_datetime_update.py", 955 | "11_datetime_update.py", 956 | "10_datetime_update.py", 957 | 
"2_datetime_update.py", 958 | ], 959 | ) 960 | 961 | Migrate.migrate_location = "." 962 | 963 | assert Migrate.get_all_version_files() == [ 964 | "1_datetime_update.py", 965 | "2_datetime_update.py", 966 | "10_datetime_update.py", 967 | "11_datetime_update.py", 968 | ] 969 | --------------------------------------------------------------------------------