├── .github ├── FUNDING.yml └── workflows │ ├── ci.yml │ └── pypi.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── Makefile ├── README.md ├── README_RU.md ├── aerich ├── __init__.py ├── __main__.py ├── _compat.py ├── cli.py ├── coder.py ├── ddl │ ├── __init__.py │ ├── mysql │ │ └── __init__.py │ ├── postgres │ │ └── __init__.py │ └── sqlite │ │ └── __init__.py ├── enums.py ├── exceptions.py ├── inspectdb │ ├── __init__.py │ ├── mysql.py │ ├── postgres.py │ └── sqlite.py ├── migrate.py ├── models.py ├── utils.py └── version.py ├── conftest.py ├── poetry.lock ├── pyproject.toml └── tests ├── __init__.py ├── _utils.py ├── assets ├── fake │ ├── _tests.py │ ├── db.py │ └── settings.py ├── migrate_no_input │ ├── _tests.py │ ├── models.py │ └── settings.py ├── missing_aerich_models │ ├── models.py │ └── settings.py ├── remove_constraint │ ├── _tests.py │ ├── db.py │ ├── models.py │ ├── models_2.py │ ├── models_3.py │ ├── models_4.py │ ├── models_5.py │ └── settings.py └── sqlite_migrate │ ├── _tests.py │ ├── conftest_.py │ ├── models.py │ └── settings.py ├── indexes.py ├── models.py ├── models_second.py ├── old_models.py ├── test_cli.py ├── test_command.py ├── test_ddl.py ├── test_fake.py ├── test_inspectdb.py ├── test_migrate.py ├── test_python_m.py ├── test_remove_unique_constraint.py ├── test_sqlite_migrate.py └── test_utils.py /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | custom: ["https://sponsor.long2ice.io"] 2 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | on: 3 | push: 4 | branches-ignore: 5 | - main 6 | pull_request: 7 | branches-ignore: 8 | - main 9 | jobs: 10 | ci: 11 | runs-on: ubuntu-latest 12 | services: 13 | postgres: 14 | image: postgres:latest 15 | ports: 16 | - 5432:5432 17 | env: 18 | POSTGRES_PASSWORD: 123456 19 | POSTGRES_USER: 
postgres 20 | options: --health-cmd=pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 21 | strategy: 22 | matrix: 23 | python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] 24 | tortoise-orm: 25 | - tortoise024 26 | - tortoise025 27 | - tortoisedev 28 | steps: 29 | - name: Start MySQL 30 | run: sudo systemctl start mysql.service 31 | - uses: actions/checkout@v4 32 | - uses: actions/setup-python@v5 33 | with: 34 | python-version: ${{ matrix.python-version }} 35 | - uses: actions/cache@v4 36 | with: 37 | path: ~/.cache/pip 38 | key: ${{ runner.os }}-pip-${{ hashFiles('**/poetry.lock') }} 39 | restore-keys: | 40 | ${{ runner.os }}-pip- 41 | - name: Install and configure Poetry 42 | run: | 43 | pip install -U pip 44 | pip install "poetry>=2.0" 45 | poetry env use python${{ matrix.python-version }} 46 | - name: Install dependencies and check style 47 | run: poetry run make check 48 | - name: Install TortoiseORM v0.24 49 | if: matrix.tortoise-orm == 'tortoise024' 50 | run: poetry run pip install --upgrade "tortoise-orm>=0.24,<0.25" 51 | - name: Install TortoiseORM v0.25 52 | if: matrix.tortoise-orm == 'tortoise025' 53 | run: poetry run pip install --upgrade "tortoise-orm>=0.25,<0.26" 54 | - name: Install TortoiseORM develop branch 55 | if: matrix.tortoise-orm == 'tortoisedev' 56 | run: | 57 | poetry run pip uninstall -y tortoise-orm 58 | poetry run pip install --upgrade "git+https://github.com/tortoise/tortoise-orm" 59 | - name: CI 60 | env: 61 | MYSQL_PASS: root 62 | MYSQL_HOST: 127.0.0.1 63 | MYSQL_PORT: 3306 64 | POSTGRES_PASS: 123456 65 | POSTGRES_HOST: 127.0.0.1 66 | POSTGRES_PORT: 5432 67 | run: poetry run make _testall 68 | - name: Verify aiomysql support 69 | # Only check the latest version of tortoise 70 | if: matrix.tortoise-orm == 'tortoisedev' 71 | run: | 72 | poetry run pip uninstall -y asyncmy 73 | poetry run make test_mysql 74 | poetry run pip install asyncmy 75 | env: 76 | MYSQL_PASS: root 77 | MYSQL_HOST: 127.0.0.1 78 | 
MYSQL_PORT: 3306 79 | - name: Verify psycopg support 80 | # Only check the latest version of tortoise 81 | if: matrix.tortoise-orm == 'tortoisedev' 82 | run: poetry run make test_psycopg 83 | env: 84 | POSTGRES_PASS: 123456 85 | POSTGRES_HOST: 127.0.0.1 86 | POSTGRES_PORT: 5432 87 | -------------------------------------------------------------------------------- /.github/workflows/pypi.yml: -------------------------------------------------------------------------------- 1 | name: pypi 2 | on: 3 | release: 4 | types: 5 | - created 6 | jobs: 7 | publish: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v4 11 | - uses: actions/setup-python@v5 12 | with: 13 | python-version: '3.x' 14 | - name: Install and configure Poetry 15 | run: | 16 | pip install -U pip poetry 17 | poetry config virtualenvs.create false 18 | - name: Build dists 19 | run: make build 20 | - name: Pypi Publish 21 | uses: pypa/gh-action-pypi-publish@master 22 | with: 23 | user: __token__ 24 | password: ${{ secrets.pypi_password }} 25 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### Python template 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | wheels/ 25 | pip-wheel-metadata/ 26 | share/python-wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | MANIFEST 31 | 32 | # PyInstaller 33 | # Usually these files are written by a python script from a template 34 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
35 | *.manifest 36 | *.spec 37 | 38 | # Installer logs 39 | pip-log.txt 40 | pip-delete-this-directory.txt 41 | 42 | # Unit test / coverage reports 43 | htmlcov/ 44 | .tox/ 45 | .nox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | *.py,cover 53 | .hypothesis/ 54 | .pytest_cache/ 55 | cover/ 56 | 57 | # Translations 58 | *.mo 59 | *.pot 60 | 61 | # Django stuff: 62 | *.log 63 | local_settings.py 64 | db.sqlite3 65 | db.sqlite3-journal 66 | 67 | # Flask stuff: 68 | instance/ 69 | .webassets-cache 70 | 71 | # Scrapy stuff: 72 | .scrapy 73 | 74 | # Sphinx documentation 75 | docs/_build/ 76 | 77 | # PyBuilder 78 | .pybuilder/ 79 | target/ 80 | 81 | # Jupyter Notebook 82 | .ipynb_checkpoints 83 | 84 | # IPython 85 | profile_default/ 86 | ipython_config.py 87 | 88 | # pyenv 89 | # For a library or package, you might want to ignore these files since the code is 90 | # intended to run in multiple environments; otherwise, check them in: 91 | # .python-version 92 | 93 | # pipenv 94 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 95 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 96 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 97 | # install all needed dependencies. 98 | #Pipfile.lock 99 | 100 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 101 | __pypackages__/ 102 | 103 | # Celery stuff 104 | celerybeat-schedule 105 | celerybeat.pid 106 | 107 | # SageMath parsed files 108 | *.sage.py 109 | 110 | # Environments 111 | .env 112 | .venv 113 | env/ 114 | venv/ 115 | ENV/ 116 | env.bak/ 117 | venv.bak/ 118 | 119 | # Spyder project settings 120 | .spyderproject 121 | .spyproject 122 | 123 | # Rope project settings 124 | .ropeproject 125 | 126 | # mkdocs documentation 127 | /site 128 | 129 | # mypy 130 | .mypy_cache/ 131 | .dmypy.json 132 | dmypy.json 133 | 134 | # Pyre type checker 135 | .pyre/ 136 | 137 | # pytype static type analyzer 138 | .pytype/ 139 | 140 | # Cython debug symbols 141 | cython_debug/ 142 | 143 | .idea 144 | migrations 145 | aerich.ini 146 | src 147 | .vscode 148 | .DS_Store 149 | .python-version -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # ChangeLog 2 | 3 | ## 0.9 4 | 5 | ### [0.9.1] (Unreleased) 6 | 7 | #### Fixed 8 | - fix: cryptic error message when 'aerich.models' not included. ([#454]) 9 | 10 | [#454]: https://github.com/tortoise/aerich/pull/454 11 | 12 | ### [0.9.0](../../releases/tag/v0.9.0) - 2025-05-12 13 | 14 | #### Added 15 | - Support `--no-input` for aerich migrate. ([#450]) 16 | 17 | #### Changed 18 | - Drop support for Python3.8. ([#446]) 19 | - Ask confirm before delete same version migration file. ([#451]) 20 | 21 | #### Fixed 22 | - fix: m2m migrate raises TypeError. ([#448]) 23 | - fix: `aerich init-db` process is suspended. ([#435]) 24 | - fix: migration will incorrectly remove constraints with index deletions. ([#450]) 25 | - fix: aerich migrate crashes without init-db & aerich init-db should create folder after validating app. 
([#443]) 26 | 27 | [#451]: https://github.com/tortoise/aerich/pull/451 28 | [#450]: https://github.com/tortoise/aerich/pull/450 29 | [#448]: https://github.com/tortoise/aerich/pull/448 30 | [#446]: https://github.com/tortoise/aerich/pull/446 31 | [#443]: https://github.com/tortoise/aerich/pull/443 32 | [#435]: https://github.com/tortoise/aerich/pull/435 33 | 34 | ## 0.8 35 | 36 | ### [0.8.2](../../releases/tag/v0.8.2) - 2025-02-28 37 | 38 | #### Added 39 | - Support changes `max_length` or int type for primary key field. ([#428]) 40 | - feat: support psycopg. ([#425]) 41 | - Support run `poetry add aerich` in project that inited by poetry v2. ([#424]) 42 | - feat: support command `python -m aerich`. ([#417]) 43 | - feat: add --fake to upgrade/downgrade. ([#398]) 44 | - Support ignore table by settings `managed=False` in `Meta` class. ([#397]) 45 | 46 | #### Fixed 47 | - fix: aerich migrate raises tortoise.exceptions.FieldError when `index.INDEX_TYPE` is not empty. ([#415]) 48 | - No migration occurs as expected when adding `unique=True` to indexed field. ([#404]) 49 | - fix: inspectdb raise KeyError 'int2' for smallint. ([#401]) 50 | - fix: inspectdb not match data type 'DOUBLE' and 'CHAR' for MySQL. 
([#187]) 51 | 52 | ### Changed 53 | - Refactored version management to use `importlib.metadata.version(__package__)` instead of hardcoded version string ([#412]) 54 | 55 | [#397]: https://github.com/tortoise/aerich/pull/397 56 | [#398]: https://github.com/tortoise/aerich/pull/398 57 | [#401]: https://github.com/tortoise/aerich/pull/401 58 | [#404]: https://github.com/tortoise/aerich/pull/404 59 | [#412]: https://github.com/tortoise/aerich/pull/412 60 | [#415]: https://github.com/tortoise/aerich/pull/415 61 | [#417]: https://github.com/tortoise/aerich/pull/417 62 | [#424]: https://github.com/tortoise/aerich/pull/424 63 | [#425]: https://github.com/tortoise/aerich/pull/425 64 | 65 | ### [0.8.1](../../releases/tag/v0.8.1) - 2024-12-27 66 | 67 | #### Fixed 68 | - fix: add o2o field does not create constraint when migrating. ([#396]) 69 | - Migration with duplicate renaming of columns in some cases. ([#395]) 70 | - fix: intermediate table for m2m relation not created. ([#394]) 71 | - Migrate add m2m field with custom through generate duplicated table. ([#393]) 72 | - Migrate drop the wrong m2m field when model have multi m2m fields. ([#376]) 73 | - KeyError raised when removing or renaming an existing model. ([#386]) 74 | - fix: error when there is `__init__.py` in the migration folder. ([#272]) 75 | - Setting null=false on m2m field causes migration to fail. ([#334]) 76 | - Fix NonExistentKey when running `aerich init` without `[tool]` section in config file. ([#284]) 77 | - Fix configuration file reading error when containing Chinese characters. ([#286]) 78 | - sqlite: failed to create/drop index. ([#302]) 79 | - PostgreSQL: Cannot drop constraint after deleting or rename FK on a model. ([#378]) 80 | - Fix create/drop indexes in every migration. ([#377]) 81 | - Sort m2m fields before comparing them with diff. ([#271]) 82 | 83 | #### Changed 84 | - Allow run `aerich init-db` with empty migration directories instead of abort with warnings. 
([#286]) 85 | - Add version constraint(>=0.21) for tortoise-orm. ([#388]) 86 | - Move `tomlkit` to optional and support `pip install aerich[toml]`. ([#392]) 87 | 88 | [#396]: https://github.com/tortoise/aerich/pull/396 89 | [#395]: https://github.com/tortoise/aerich/pull/395 90 | [#394]: https://github.com/tortoise/aerich/pull/394 91 | [#393]: https://github.com/tortoise/aerich/pull/393 92 | [#392]: https://github.com/tortoise/aerich/pull/392 93 | [#388]: https://github.com/tortoise/aerich/pull/388 94 | [#386]: https://github.com/tortoise/aerich/pull/386 95 | [#378]: https://github.com/tortoise/aerich/pull/378 96 | [#377]: https://github.com/tortoise/aerich/pull/377 97 | [#376]: https://github.com/tortoise/aerich/pull/376 98 | [#334]: https://github.com/tortoise/aerich/pull/334 99 | [#302]: https://github.com/tortoise/aerich/pull/302 100 | [#286]: https://github.com/tortoise/aerich/pull/286 101 | [#284]: https://github.com/tortoise/aerich/pull/284 102 | [#272]: https://github.com/tortoise/aerich/pull/272 103 | [#271]: https://github.com/tortoise/aerich/pull/271 104 | 105 | ### [0.8.0](../../releases/tag/v0.8.0) - 2024-12-04 106 | 107 | - Fix the issue of parameter concatenation when generating ORM with inspectdb (#331) 108 | - Fix KeyError when deleting a field with unqiue=True. (#364) 109 | - Correct the click import. (#360) 110 | - Improve CLI help text and output. (#355) 111 | - Fix mysql drop unique index raises OperationalError. (#346) 112 | 113 | **Upgrade note:** 114 | 1. Use column name as unique key name for mysql 115 | 2. Drop support for Python3.7 116 | 117 | ## 0.7 118 | 119 | ### [0.7.2](../../releases/tag/v0.7.2) - 2023-07-20 120 | 121 | - Support virtual fields. 122 | - Fix modify multiple times. (#279) 123 | - Added `-i` and `--in-transaction` options to `aerich migrate` command. (#296) 124 | - Fix generates two semicolons in a row. (#301) 125 | 126 | ### 0.7.1 127 | 128 | - Fix syntax error with python3.8.10. (#265) 129 | - Fix sql generate error. 
(#263) 130 | - Fix initialize an empty database. (#267) 131 | 132 | ### 0.7.1rc1 133 | 134 | - Fix postgres sql error (#263) 135 | 136 | ### 0.7.0 137 | 138 | **Now aerich use `.py` file to record versions.** 139 | 140 | Upgrade Note: 141 | 142 | 1. Drop `aerich` table 143 | 2. Delete `migrations/models` folder 144 | 3. Run `aerich init-db` 145 | 146 | - Improve `inspectdb` adding support to `postgresql::numeric` data type 147 | - Add support for dynamically load DDL classes easing to add support to 148 | new databases without changing `Migrate` class logic 149 | - Fix decimal field change. (#246) 150 | - Support add/remove field with index. 151 | 152 | ## 0.6 153 | 154 | ### 0.6.3 155 | 156 | - Improve `inspectdb` and support `postgres` & `sqlite`. 157 | 158 | ### 0.6.2 159 | 160 | - Support migration for specified index. (#203) 161 | 162 | ### 0.6.1 163 | 164 | - Fix `pyproject.toml` not existing error. (#217) 165 | 166 | ### 0.6.0 167 | 168 | - Change default config file from `aerich.ini` to `pyproject.toml`. (#197) 169 | 170 | **Upgrade note:** 171 | 1. Run `aerich init -t config.TORTOISE_ORM`. 172 | 2. Remove `aerich.ini`. 173 | - Remove `pydantic` dependency. (#198) 174 | - `inspectdb` support `DATE`. (#215) 175 | 176 | ## 0.5 177 | 178 | ### 0.5.8 179 | 180 | - Support `indexes` change. (#193) 181 | 182 | ### 0.5.7 183 | 184 | - Fix no module found error. (#188) (#189) 185 | 186 | ### 0.5.6 187 | 188 | - Add `Command` class. (#148) (#141) (#123) (#106) 189 | - Fix: migrate doesn't use source_field in unique_together. (#181) 190 | 191 | ### 0.5.5 192 | 193 | - Fix KeyError: 'src_folder' after upgrading aerich to 0.5.4. (#176) 194 | - Fix MySQL 5.X rename column. 195 | - Fix `db_constraint` when fk changed. (#179) 196 | 197 | ### 0.5.4 198 | 199 | - Fix incorrect index creation order. (#151) 200 | - Not catch exception when import config. (#164) 201 | - Support `drop column` for sqlite. (#40) 202 | 203 | ### 0.5.3 204 | 205 | - Fix postgres alter null. 
(#142) 206 | - Fix default function when migrate. (#147) 207 | 208 | ### 0.5.2 209 | 210 | - Fix rename field on the field add. (#134) 211 | - Fix postgres field type change error. (#135) 212 | - Fix inspectdb for `FloatField`. (#138) 213 | - Support `rename table`. (#139) 214 | 215 | ### 0.5.1 216 | 217 | - Fix tortoise connections not being closed properly. (#120) 218 | - Fix bug for field change. (#119) 219 | - Fix drop model in the downgrade. (#132) 220 | 221 | ### 0.5.0 222 | 223 | - Refactor core code, now has no limitation for everything. 224 | 225 | ## 0.4 226 | 227 | ### 0.4.4 228 | 229 | - Fix unnecessary import. (#113) 230 | 231 | ### 0.4.3 232 | 233 | - Replace migrations separator to sql standard comment. 234 | - Add `inspectdb` command. 235 | 236 | ### 0.4.2 237 | 238 | - Use `pathlib` for path resolving. (#89) 239 | - Fix upgrade in new db. (#96) 240 | - Fix packaging error. (#92) 241 | 242 | ### 0.4.1 243 | 244 | - Bug fix. (#91 #93) 245 | 246 | ### 0.4.0 247 | 248 | - Use `.sql` instead of `.json` to store version file. 249 | - Add `rename` column support MySQL5. 250 | - Remove callable detection for defaults. (#87) 251 | - Fix `sqlite` stuck. (#90) 252 | 253 | ## 0.3 254 | 255 | ### 0.3.3 256 | 257 | - Fix encoding error. (#75) 258 | - Support multiple databases. (#68) 259 | - Compatible with models file in directory. (#70) 260 | 261 | ### 0.3.2 262 | 263 | - Fix migrate to new database error. (#62) 264 | 265 | ### 0.3.1 266 | 267 | - Fix first version error. 268 | - Fix init error. (#61) 269 | 270 | ### 0.3.0 271 | 272 | - Refactoring migrate logic, and this version is not compatible with previous version. 273 | - Now there don't need `old_models.py` and it store in database. 274 | - Upgrade steps: 275 | 1. Upgrade aerich version. 276 | 2. Drop aerich table in database. 277 | 3. Delete `migrations/{app}` folder and rerun `aerich init-db`. 278 | 4. Update model and `aerich migrate` normally. 
279 | 280 | ## 0.2 281 | 282 | ### 0.2.5 283 | 284 | - Fix windows support. (#46) 285 | - Support `db_constraint` in fk, m2m should manual define table with fk. (#52) 286 | 287 | ### 0.2.4 288 | 289 | - Raise error with SQLite unsupported features. 290 | - Fix Postgres alter table. (#48) 291 | - Add `Rename` support. 292 | 293 | ### 0.2.3 294 | 295 | - Fix tortoise ssl config. 296 | - PostgreSQL add/drop index/unique. 297 | 298 | ### 0.2.2 299 | 300 | - Fix postgres drop fk. 301 | - Fix version sort. 302 | 303 | ### 0.2.1 304 | 305 | - Fix bug in windows. 306 | - Enhance PostgreSQL support. 307 | 308 | ### 0.2.0 309 | 310 | - Update model file find method. 311 | - Set `--safe` bool. 312 | 313 | ## 0.1 314 | 315 | ### 0.1.9 316 | 317 | - Fix default_connection when upgrade 318 | - Find default app instead of default. 319 | - Diff MySQL ddl. 320 | - Check tortoise config. 321 | 322 | ### 0.1.8 323 | 324 | - Fix upgrade error when migrate. 325 | - Fix init db sql error. 326 | - Support change column. 327 | 328 | ### 0.1.7 329 | 330 | - Exclude models.Aerich. 331 | - Add init record when init-db. 332 | - Fix version num str. 333 | 334 | ### 0.1.6 335 | 336 | - update dependency_links 337 | 338 | ### 0.1.5 339 | 340 | - Add sqlite and postgres support. 341 | - Fix dependency import. 342 | - Store versions in db. 343 | 344 | ### 0.1.4 345 | 346 | - Fix transaction and fields import. 347 | - Make unique index worked. 348 | - Add cli --version. 349 | 350 | ### 0.1.3 351 | 352 | - Support indexes and unique_together. 353 | 354 | ### 0.1.2 355 | 356 | - Now aerich support m2m. 357 | - Add cli cmd init-db. 358 | - Change cli options. 359 | 360 | ### 0.1.1 361 | 362 | - Now aerich is basic worked. 
363 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2020 long2ice 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | checkfiles = aerich/ tests/ conftest.py 2 | py_warn = PYTHONDEVMODE=1 3 | MYSQL_HOST ?= "127.0.0.1" 4 | MYSQL_PORT ?= 3306 5 | MYSQL_PASS ?= "123456" 6 | POSTGRES_HOST ?= "127.0.0.1" 7 | POSTGRES_PORT ?= 5432 8 | POSTGRES_PASS ?= 123456 9 | 10 | up: 11 | @poetry update 12 | 13 | deps: 14 | @poetry install --all-extras --all-groups 15 | 16 | _style: 17 | @ruff check --fix $(checkfiles) 18 | @ruff format $(checkfiles) 19 | style: deps _style 20 | 21 | _check: 22 | @ruff format --check $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false) 23 | @ruff check $(checkfiles) 24 | @mypy $(checkfiles) 25 | @bandit -r aerich 26 | check: deps _check 27 | 28 | _lint: _build 29 | @ruff format $(checkfiles) 30 | ruff check --fix $(checkfiles) 31 | mypy $(checkfiles) 32 | bandit -c pyproject.toml -r $(checkfiles) 33 | twine check dist/* 34 | lint: deps _lint 35 | 36 | test: deps 37 | $(py_warn) TEST_DB=sqlite://:memory: pytest 38 | 39 | test_sqlite: 40 | 
$(py_warn) TEST_DB=sqlite://:memory: pytest 41 | 42 | test_mysql: 43 | $(py_warn) TEST_DB="mysql://root:$(MYSQL_PASS)@$(MYSQL_HOST):$(MYSQL_PORT)/test_\{\}" pytest -vv -s 44 | 45 | test_postgres: 46 | $(py_warn) TEST_DB="postgres://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest -vv -s 47 | 48 | test_psycopg: 49 | $(py_warn) TEST_DB="psycopg://postgres:$(POSTGRES_PASS)@$(POSTGRES_HOST):$(POSTGRES_PORT)/test_\{\}" pytest -vv -s 50 | 51 | _testall: test_sqlite test_postgres test_mysql 52 | testall: deps _testall 53 | 54 | _build: 55 | @poetry build 56 | build: deps _build 57 | 58 | ci: build _check _testall 59 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Aerich 2 | 3 | [![image](https://img.shields.io/pypi/v/aerich.svg?style=flat)](https://pypi.python.org/pypi/aerich) 4 | [![image](https://img.shields.io/github/license/tortoise/aerich)](https://github.com/tortoise/aerich) 5 | [![image](https://github.com/tortoise/aerich/workflows/pypi/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:pypi) 6 | [![image](https://github.com/tortoise/aerich/workflows/ci/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:ci) 7 | ![Python Versions](https://img.shields.io/pypi/pyversions/aerich) 8 | 9 | English | [Русский](./README_RU.md) 10 | 11 | ## Introduction 12 | 13 | Aerich is a database migrations tool for TortoiseORM, which is like Alembic for SQLAlchemy, or like Django ORM with 14 | its own migration solution. 15 | 16 | ## Install 17 | 18 | Just install from pypi: 19 | 20 | ```shell 21 | pip install "aerich[toml]" 22 | ``` 23 | 24 | ## Quick Start 25 | 26 | ```shell 27 | > aerich -h 28 | 29 | Usage: aerich [OPTIONS] COMMAND [ARGS]... 30 | 31 | Options: 32 | -V, --version Show the version and exit. 33 | -c, --config TEXT Config file. 
[default: pyproject.toml] 34 | --app TEXT Tortoise-ORM app name. 35 | -h, --help Show this message and exit. 36 | 37 | Commands: 38 | downgrade Downgrade to specified version. 39 | heads Show current available heads in migrate location. 40 | history List all migrate items. 41 | init Init config file and generate root migrate location. 42 | init-db Generate schema and generate app migrate location. 43 | inspectdb Introspects the database tables to standard output as... 44 | migrate Generate migrate changes file. 45 | upgrade Upgrade to specified version. 46 | ``` 47 | 48 | ## Usage 49 | 50 | You need to add `aerich.models` to your `Tortoise-ORM` config first. Example: 51 | 52 | ```python 53 | TORTOISE_ORM = { 54 | "connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"}, 55 | "apps": { 56 | "models": { 57 | "models": ["tests.models", "aerich.models"], 58 | "default_connection": "default", 59 | }, 60 | }, 61 | } 62 | ``` 63 | 64 | ### Initialization 65 | 66 | ```shell 67 | > aerich init -h 68 | 69 | Usage: aerich init [OPTIONS] 70 | 71 | Init config file and generate root migrate location. 72 | 73 | Options: 74 | -t, --tortoise-orm TEXT Tortoise-ORM config module dict variable, like 75 | settings.TORTOISE_ORM. [required] 76 | --location TEXT Migrate store location. [default: ./migrations] 77 | -s, --src_folder TEXT Folder of the source, relative to the project root. 78 | -h, --help Show this message and exit. 79 | ``` 80 | 81 | Initialize the config file and migrations location: 82 | 83 | ```shell 84 | > aerich init -t tests.backends.mysql.TORTOISE_ORM 85 | 86 | Success create migrate location ./migrations 87 | Success write config to pyproject.toml 88 | ``` 89 | 90 | *Note*: aerich will import the config file when running init-db/migrate/upgrade/heads/history commands, so it is better to keep this file simple and clean. 
91 | 92 | ### Init db 93 | 94 | ```shell 95 | > aerich init-db 96 | 97 | Success create app migrate location ./migrations/models 98 | Success generate schema for app "models" 99 | ``` 100 | 101 | If your Tortoise-ORM app is not the default `models`, you must specify the correct app via `--app`, 102 | e.g. `aerich --app other_models init-db`. 103 | 104 | ### Update models and make migrate 105 | 106 | ```shell 107 | > aerich migrate --name drop_column 108 | 109 | Success migrate 1_202029051520102929_drop_column.py 110 | ``` 111 | 112 | Format of migrate filename is 113 | `{version_num}_{datetime}_{name|update}.py`. 114 | 115 | If `aerich` guesses you are renaming a column, it will ask `Rename {old_column} to {new_column} [True]`. You can choose 116 | `True` to rename column without column drop, or choose `False` to drop the column then create. Note that the latter may 117 | lose data. 118 | 119 | If you need to manually write migration, you could generate empty file: 120 | 121 | ```shell 122 | > aerich migrate --name add_index --empty 123 | 124 | Success migrate 1_202326122220101229_add_index.py 125 | ``` 126 | 127 | ### Upgrade to latest version 128 | 129 | ```shell 130 | > aerich upgrade 131 | 132 | Success upgrade 1_202029051520102929_drop_column.py 133 | ``` 134 | 135 | Now your db is migrated to latest. 136 | 137 | ### Downgrade to specified version 138 | 139 | ```shell 140 | > aerich downgrade -h 141 | 142 | Usage: aerich downgrade [OPTIONS] 143 | 144 | Downgrade to specified version. 145 | 146 | Options: 147 | -v, --version INTEGER Specified version, default to last. [default: -1] 148 | -d, --delete Delete version files at the same time. [default: 149 | False] 150 | 151 | --yes Confirm the action without prompting. 152 | -h, --help Show this message and exit. 153 | ``` 154 | 155 | ```shell 156 | > aerich downgrade 157 | 158 | Success downgrade 1_202029051520102929_drop_column.py 159 | ``` 160 | 161 | Now your db is rolled back to the specified version. 
162 | 163 | ### Show history 164 | 165 | ```shell 166 | > aerich history 167 | 168 | 1_202029051520102929_drop_column.py 169 | ``` 170 | 171 | ### Show heads to be migrated 172 | 173 | ```shell 174 | > aerich heads 175 | 176 | 1_202029051520102929_drop_column.py 177 | ``` 178 | 179 | ### Inspect db tables to TortoiseORM model 180 | 181 | Currently `inspectdb` support MySQL & Postgres & SQLite. 182 | 183 | ```shell 184 | Usage: aerich inspectdb [OPTIONS] 185 | 186 | Introspects the database tables to standard output as TortoiseORM model. 187 | 188 | Options: 189 | -t, --table TEXT Which tables to inspect. 190 | -h, --help Show this message and exit. 191 | ``` 192 | 193 | Inspect all tables and print to console: 194 | 195 | ```shell 196 | aerich --app models inspectdb 197 | ``` 198 | 199 | Inspect a specified table in the default app and redirect to `models.py`: 200 | 201 | ```shell 202 | aerich inspectdb -t user > models.py 203 | ``` 204 | 205 | For example, you table is: 206 | 207 | ```sql 208 | CREATE TABLE `test` 209 | ( 210 | `id` int NOT NULL AUTO_INCREMENT, 211 | `decimal` decimal(10, 2) NOT NULL, 212 | `date` date DEFAULT NULL, 213 | `datetime` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, 214 | `time` time DEFAULT NULL, 215 | `float` float DEFAULT NULL, 216 | `string` varchar(200) COLLATE utf8mb4_general_ci DEFAULT NULL, 217 | `tinyint` tinyint DEFAULT NULL, 218 | PRIMARY KEY (`id`), 219 | KEY `asyncmy_string_index` (`string`) 220 | ) ENGINE = InnoDB 221 | DEFAULT CHARSET = utf8mb4 222 | COLLATE = utf8mb4_general_ci 223 | ``` 224 | 225 | Now run `aerich inspectdb -t test` to see the generated model: 226 | 227 | ```python 228 | from tortoise import Model, fields 229 | 230 | 231 | class Test(Model): 232 | date = fields.DateField(null=True) 233 | datetime = fields.DatetimeField(auto_now=True) 234 | decimal = fields.DecimalField(max_digits=10, decimal_places=2) 235 | float = fields.FloatField(null=True) 236 | id = 
fields.IntField(primary_key=True) 237 | string = fields.CharField(max_length=200, null=True) 238 | time = fields.TimeField(null=True) 239 | tinyint = fields.BooleanField(null=True) 240 | ``` 241 | 242 | Note that this command is limited and can't infer some fields, such as `IntEnumField`, `ForeignKeyField`, and others. 243 | 244 | ### Multiple databases 245 | 246 | ```python 247 | tortoise_orm = { 248 | "connections": { 249 | "default": "postgres://postgres_user:postgres_pass@127.0.0.1:5432/db1", 250 | "second": "postgres://postgres_user:postgres_pass@127.0.0.1:5432/db2", 251 | }, 252 | "apps": { 253 | "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"}, 254 | "models_second": {"models": ["tests.models_second"], "default_connection": "second", }, 255 | }, 256 | } 257 | ``` 258 | 259 | You only need to specify `aerich.models` in one app, and must specify `--app` when running `aerich migrate` and so on, e.g. `aerich --app models_second migrate`. 260 | 261 | ## Restore `aerich` workflow 262 | 263 | In some cases, such as breaking changes from an upgrade of `aerich`, you can't run `aerich migrate` or `aerich upgrade`, you 264 | can make the following steps: 265 | 266 | 1. drop `aerich` table. 267 | 2. delete `migrations/{app}` directory. 268 | 3. rerun `aerich init-db`. 269 | 270 | Note that these actions are safe; you can also do this to reset your migrations if your migration files are too many. 271 | 272 | ## Use `aerich` in application 273 | 274 | You can use `aerich` outside of the CLI by using the `Command` class. 275 | 276 | ```python 277 | from aerich import Command 278 | 279 | async with Command(tortoise_config=config, app='models') as command: 280 | await command.migrate('test') 281 | await command.upgrade() 282 | ``` 283 | 284 | ## Upgrade/Downgrade with `--fake` option 285 | 286 | Marks the migrations up to the latest one (or back to the target one) as applied, but without actually running the SQL to change your database schema. 
287 | 288 | - Upgrade 289 | 290 | ```bash 291 | aerich upgrade --fake 292 | aerich --app models upgrade --fake 293 | ``` 294 | - Downgrade 295 | 296 | ```bash 297 | aerich downgrade --fake -v 2 298 | aerich --app models downgrade --fake -v 2 299 | ``` 300 | 301 | ### Ignore tables 302 | 303 | You can tell aerich to ignore a table by setting `managed=False` in the `Meta` class, e.g.: 304 | ```py 305 | class MyModel(Model): 306 | class Meta: 307 | managed = False 308 | ``` 309 | **Note** `managed=False` is not recognized by `tortoise-orm` and `aerich init-db`; it only affects `aerich migrate`. 310 | 311 | ## License 312 | 313 | This project is licensed under the 314 | [Apache-2.0](https://github.com/long2ice/aerich/blob/master/LICENSE) License. 315 | -------------------------------------------------------------------------------- /README_RU.md: -------------------------------------------------------------------------------- 1 | # Aerich 2 | 3 | [![image](https://img.shields.io/pypi/v/aerich.svg?style=flat)](https://pypi.python.org/pypi/aerich) 4 | [![image](https://img.shields.io/github/license/tortoise/aerich)](https://github.com/tortoise/aerich) 5 | [![image](https://github.com/tortoise/aerich/workflows/pypi/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:pypi) 6 | [![image](https://github.com/tortoise/aerich/workflows/ci/badge.svg)](https://github.com/tortoise/aerich/actions?query=workflow:ci) 7 | 8 | [English](./README.md) | Русский 9 | 10 | ## Введение 11 | 12 | Aerich - это инструмент для миграции базы данных для TortoiseORM, который аналогичен Alembic для SQLAlchemy или встроенному решению миграций в Django ORM. 13 | 14 | ## Установка 15 | 16 | Просто установите из pypi: 17 | 18 | ```shell 19 | pip install aerich 20 | ``` 21 | 22 | ## Быстрый старт 23 | 24 | ```shell 25 | > aerich -h 26 | 27 | Usage: aerich [OPTIONS] COMMAND [ARGS]... 28 | 29 | Options: 30 | -V, --version Show the version and exit. 31 | -c, --config TEXT Config file. 
[default: pyproject.toml] 32 | --app TEXT Tortoise-ORM app name. 33 | -h, --help Show this message and exit. 34 | 35 | Commands: 36 | downgrade Downgrade to specified version. 37 | heads Show current available heads in migrate location. 38 | history List all migrate items. 39 | init Init config file and generate root migrate location. 40 | init-db Generate schema and generate app migrate location. 41 | inspectdb Introspects the database tables to standard output as... 42 | migrate Generate migrate changes file. 43 | upgrade Upgrade to specified version. 44 | ``` 45 | 46 | ## Использование 47 | 48 | Сначала вам нужно добавить aerich.models в конфигурацию вашего Tortoise-ORM. Пример: 49 | 50 | ```python 51 | TORTOISE_ORM = { 52 | "connections": {"default": "mysql://root:123456@127.0.0.1:3306/test"}, 53 | "apps": { 54 | "models": { 55 | "models": ["tests.models", "aerich.models"], 56 | "default_connection": "default", 57 | }, 58 | }, 59 | } 60 | ``` 61 | 62 | ### Инициализация 63 | 64 | ```shell 65 | > aerich init -h 66 | 67 | Usage: aerich init [OPTIONS] 68 | 69 | Init config file and generate root migrate location. 70 | 71 | Options: 72 | -t, --tortoise-orm TEXT Tortoise-ORM config module dict variable, like 73 | settings.TORTOISE_ORM. [required] 74 | --location TEXT Migrate store location. [default: ./migrations] 75 | -s, --src_folder TEXT Folder of the source, relative to the project root. 76 | -h, --help Show this message and exit. 
77 | ``` 78 | 79 | Инициализируйте файл конфигурации и задайте местоположение миграций: 80 | 81 | ```shell 82 | > aerich init -t tests.backends.mysql.TORTOISE_ORM 83 | 84 | Success create migrate location ./migrations 85 | Success write config to pyproject.toml 86 | ``` 87 | 88 | ### Инициализация базы данных 89 | 90 | ```shell 91 | > aerich init-db 92 | 93 | Success create app migrate location ./migrations/models 94 | Success generate schema for app "models" 95 | ``` 96 | 97 | Если ваше приложение Tortoise-ORM не является приложением по умолчанию с именем models, вы должны указать правильное имя приложения с помощью параметра --app, например: aerich --app other_models init-db. 98 | 99 | ### Обновление моделей и создание миграции 100 | 101 | ```shell 102 | > aerich migrate --name drop_column 103 | 104 | Success migrate 1_202029051520102929_drop_column.py 105 | ``` 106 | 107 | Формат имени файла миграции следующий: `{версия}_{дата_и_время}_{имя|обновление}.py`. 108 | 109 | Если aerich предполагает, что вы переименовываете столбец, он спросит: 110 | Переименовать `{старый_столбец} в {новый_столбец} [True]`. Вы можете выбрать `True`, 111 | чтобы переименовать столбец без удаления столбца, или выбрать `False`, чтобы удалить столбец, 112 | а затем создать новый. Обратите внимание, что последний вариант может привести к потере данных. 113 | 114 | 115 | ### Обновление до последней версии 116 | 117 | ```shell 118 | > aerich upgrade 119 | 120 | Success upgrade 1_202029051520102929_drop_column.py 121 | ``` 122 | 123 | Теперь ваша база данных обновлена до последней версии. 124 | 125 | ### Откат до указанной версии 126 | 127 | ```shell 128 | > aerich downgrade -h 129 | 130 | Usage: aerich downgrade [OPTIONS] 131 | 132 | Downgrade to specified version. 133 | 134 | Options: 135 | -v, --version INTEGER Specified version, default to last. [default: -1] 136 | -d, --delete Delete version files at the same time. 
[default: 137 | False] 138 | 139 | --yes Confirm the action without prompting. 140 | -h, --help Show this message and exit. 141 | ``` 142 | 143 | ```shell 144 | > aerich downgrade 145 | 146 | Success downgrade 1_202029051520102929_drop_column.py 147 | ``` 148 | 149 | Теперь ваша база данных откатилась до указанной версии. 150 | 151 | ### Показать историю 152 | 153 | ```shell 154 | > aerich history 155 | 156 | 1_202029051520102929_drop_column.py 157 | ``` 158 | 159 | ### Чтобы узнать, какие миграции должны быть применены, можно использовать команду: 160 | 161 | ```shell 162 | > aerich heads 163 | 164 | 1_202029051520102929_drop_column.py 165 | ``` 166 | 167 | ### Осмотр таблиц базы данных для модели TortoiseORM 168 | 169 | В настоящее время inspectdb поддерживает MySQL, Postgres и SQLite. 170 | 171 | ```shell 172 | Usage: aerich inspectdb [OPTIONS] 173 | 174 | Introspects the database tables to standard output as TortoiseORM model. 175 | 176 | Options: 177 | -t, --table TEXT Which tables to inspect. 178 | -h, --help Show this message and exit. 
179 | ``` 180 | 181 | Посмотреть все таблицы и вывести их на консоль: 182 | 183 | ```shell 184 | aerich --app models inspectdb 185 | ``` 186 | 187 | Осмотреть указанную таблицу в приложении по умолчанию и перенаправить в models.py: 188 | 189 | ```shell 190 | aerich inspectdb -t user > models.py 191 | ``` 192 | 193 | Например, ваша таблица выглядит следующим образом: 194 | 195 | ```sql 196 | CREATE TABLE `test` 197 | ( 198 | `id` int NOT NULL AUTO_INCREMENT, 199 | `decimal` decimal(10, 2) NOT NULL, 200 | `date` date DEFAULT NULL, 201 | `datetime` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, 202 | `time` time DEFAULT NULL, 203 | `float` float DEFAULT NULL, 204 | `string` varchar(200) COLLATE utf8mb4_general_ci DEFAULT NULL, 205 | `tinyint` tinyint DEFAULT NULL, 206 | PRIMARY KEY (`id`), 207 | KEY `asyncmy_string_index` (`string`) 208 | ) ENGINE = InnoDB 209 | DEFAULT CHARSET = utf8mb4 210 | COLLATE = utf8mb4_general_ci 211 | ``` 212 | 213 | Теперь выполните команду aerich inspectdb -t test, чтобы увидеть сгенерированную модель: 214 | 215 | ```python 216 | from tortoise import Model, fields 217 | 218 | 219 | class Test(Model): 220 | date = fields.DateField(null=True, ) 221 | datetime = fields.DatetimeField(auto_now=True, ) 222 | decimal = fields.DecimalField(max_digits=10, decimal_places=2, ) 223 | float = fields.FloatField(null=True, ) 224 | id = fields.IntField(pk=True, ) 225 | string = fields.CharField(max_length=200, null=True, ) 226 | time = fields.TimeField(null=True, ) 227 | tinyint = fields.BooleanField(null=True, ) 228 | ``` 229 | 230 | Обратите внимание, что эта команда имеет ограничения и не может автоматически определить некоторые поля, такие как `IntEnumField`, `ForeignKeyField` и другие. 
231 | 232 | ### Несколько баз данных 233 | 234 | ```python 235 | tortoise_orm = { 236 | "connections": { 237 | "default": expand_db_url(db_url, True), 238 | "second": expand_db_url(db_url_second, True), 239 | }, 240 | "apps": { 241 | "models": {"models": ["tests.models", "aerich.models"], "default_connection": "default"}, 242 | "models_second": {"models": ["tests.models_second"], "default_connection": "second", }, 243 | }, 244 | } 245 | ``` 246 | 247 | Вам нужно указать `aerich.models` только в одном приложении и должны указывать `--app` при запуске команды `aerich migrate` и т.д. 248 | 249 | ## Восстановление рабочего процесса aerich 250 | 251 | В некоторых случаях, например, при возникновении проблем после обновления `aerich`, вы не можете запустить `aerich migrate` или `aerich upgrade`. В таком случае вы можете выполнить следующие шаги: 252 | 253 | 1. удалите таблицы `aerich`. 254 | 2. удалите директорию `migrations/{app}`. 255 | 3. rerun `aerich init-db`. 256 | 257 | Обратите внимание, что эти действия безопасны, и вы можете использовать их для сброса миграций, если у вас слишком много файлов миграции. 258 | 259 | ## Использование aerich в приложении 260 | 261 | Вы можете использовать `aerich` вне командной строки, используя класс `Command`. 262 | 263 | ```python 264 | from aerich import Command 265 | 266 | command = Command(tortoise_config=config, app='models') 267 | await command.init() 268 | await command.migrate('test') 269 | ``` 270 | 271 | ## Лицензия 272 | 273 | Этот проект лицензирован в соответствии с лицензией 274 | [Apache-2.0](https://github.com/long2ice/aerich/blob/master/LICENSE) Лицензия. 
-------------------------------------------------------------------------------- /aerich/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import platform 5 | from contextlib import AbstractAsyncContextManager 6 | from pathlib import Path 7 | from typing import TYPE_CHECKING, Literal, overload 8 | 9 | import tortoise 10 | from tortoise import BaseDBAsyncClient, Tortoise, connections 11 | from tortoise.exceptions import OperationalError 12 | from tortoise.transactions import in_transaction 13 | from tortoise.utils import generate_schema_for_client, get_schema_sql 14 | 15 | from aerich.exceptions import DowngradeError 16 | from aerich.inspectdb.mysql import InspectMySQL 17 | from aerich.inspectdb.postgres import InspectPostgres 18 | from aerich.inspectdb.sqlite import InspectSQLite 19 | from aerich.migrate import MIGRATE_TEMPLATE, Migrate 20 | from aerich.models import Aerich 21 | from aerich.utils import ( 22 | get_app_connection, 23 | get_app_connection_name, 24 | get_models_describe, 25 | import_py_file, 26 | ) 27 | 28 | if TYPE_CHECKING: 29 | from tortoise import Model 30 | from tortoise.fields.relational import ManyToManyFieldInstance 31 | 32 | from aerich.inspectdb import Inspect 33 | 34 | 35 | def _init_asyncio_patch() -> None: 36 | """ 37 | Select compatible event loop for psycopg3. 38 | 39 | As of Python 3.8+, the default event loop on Windows is `proactor`, 40 | however psycopg3 requires the old default "selector" event loop. 41 | See https://www.psycopg.org/psycopg3/docs/advanced/async.html 42 | """ 43 | if platform.system() == "Windows": 44 | try: 45 | from asyncio import WindowsSelectorEventLoopPolicy # type:ignore 46 | except ImportError: 47 | pass # Can't assign a policy which doesn't exist. 
48 | else: 49 | from asyncio import get_event_loop_policy, set_event_loop_policy 50 | 51 | if not isinstance(get_event_loop_policy(), WindowsSelectorEventLoopPolicy): 52 | set_event_loop_policy(WindowsSelectorEventLoopPolicy()) 53 | 54 | 55 | def _init_tortoise_0_24_1_patch() -> None: 56 | # this patch is for "tortoise-orm==0.24.1" to fix: 57 | # https://github.com/tortoise/tortoise-orm/issues/1893 58 | if tortoise.__version__ != "0.24.1": 59 | return 60 | import re 61 | from typing import cast 62 | 63 | from tortoise.backends.base.schema_generator import BaseSchemaGenerator 64 | 65 | def _get_m2m_tables( 66 | self: BaseSchemaGenerator, 67 | model: type[Model], 68 | db_table: str, 69 | safe: bool, 70 | models_tables: list[str], 71 | ) -> list[str]: # Copied from tortoise-orm 72 | m2m_tables_for_create = [] 73 | for m2m_field in model._meta.m2m_fields: 74 | field_object = cast("ManyToManyFieldInstance", model._meta.fields_map[m2m_field]) 75 | if field_object._generated or field_object.through in models_tables: 76 | continue 77 | backward_key, forward_key = field_object.backward_key, field_object.forward_key 78 | if field_object.db_constraint: 79 | backward_fk = self._create_fk_string( 80 | "", 81 | backward_key, 82 | db_table, 83 | model._meta.db_pk_column, 84 | field_object.on_delete, 85 | "", 86 | ) 87 | forward_fk = self._create_fk_string( 88 | "", 89 | forward_key, 90 | field_object.related_model._meta.db_table, 91 | field_object.related_model._meta.db_pk_column, 92 | field_object.on_delete, 93 | "", 94 | ) 95 | else: 96 | backward_fk = forward_fk = "" 97 | exists = "IF NOT EXISTS " if safe else "" 98 | through_table_name = field_object.through 99 | backward_type = self._get_pk_field_sql_type(model._meta.pk) 100 | forward_type = self._get_pk_field_sql_type(field_object.related_model._meta.pk) 101 | comment = "" 102 | if desc := field_object.description: 103 | comment = self._table_comment_generator(table=through_table_name, comment=desc) 104 | m2m_create_string 
= self.M2M_TABLE_TEMPLATE.format( 105 | exists=exists, 106 | table_name=through_table_name, 107 | backward_fk=backward_fk, 108 | forward_fk=forward_fk, 109 | backward_key=backward_key, 110 | backward_type=backward_type, 111 | forward_key=forward_key, 112 | forward_type=forward_type, 113 | extra=self._table_generate_extra(table=field_object.through), 114 | comment=comment, 115 | ) 116 | if not field_object.db_constraint: 117 | m2m_create_string = m2m_create_string.replace( 118 | """, 119 | , 120 | """, 121 | "", 122 | ) # may have better way 123 | m2m_create_string += self._post_table_hook() 124 | if getattr(field_object, "create_unique_index", field_object.unique): 125 | unique_index_create_sql = self._get_unique_index_sql( 126 | exists, through_table_name, [backward_key, forward_key] 127 | ) 128 | if unique_index_create_sql.endswith(";"): 129 | m2m_create_string += "\n" + unique_index_create_sql 130 | else: 131 | lines = m2m_create_string.splitlines() 132 | lines[-2] += "," 133 | indent = m.group() if (m := re.match(r"\s+", lines[-2])) else "" 134 | lines.insert(-1, indent + unique_index_create_sql) 135 | m2m_create_string = "\n".join(lines) 136 | m2m_tables_for_create.append(m2m_create_string) 137 | return m2m_tables_for_create 138 | 139 | setattr(BaseSchemaGenerator, "_get_m2m_tables", _get_m2m_tables) 140 | 141 | 142 | _init_asyncio_patch() 143 | _init_tortoise_0_24_1_patch() 144 | 145 | 146 | class Command(AbstractAsyncContextManager): 147 | def __init__( 148 | self, 149 | tortoise_config: dict, 150 | app: str = "models", 151 | location: str = "./migrations", 152 | ) -> None: 153 | self.tortoise_config = tortoise_config 154 | self.app = app 155 | self.location = location 156 | Migrate.app = app 157 | 158 | async def init(self) -> None: 159 | await Migrate.init(self.tortoise_config, self.app, self.location) 160 | 161 | async def __aenter__(self) -> Command: 162 | await self.init() 163 | return self 164 | 165 | async def close(self) -> None: 166 | await 
connections.close_all() 167 | 168 | async def __aexit__(self, *args, **kw) -> None: 169 | await self.close() 170 | 171 | async def _upgrade( 172 | self, conn: BaseDBAsyncClient, version_file: str, fake: bool = False 173 | ) -> None: 174 | file_path = Path(Migrate.migrate_location, version_file) 175 | m = import_py_file(file_path) 176 | upgrade = m.upgrade 177 | if not fake: 178 | await conn.execute_script(await upgrade(conn)) 179 | await Aerich.create( 180 | version=version_file, 181 | app=self.app, 182 | content=get_models_describe(self.app), 183 | ) 184 | 185 | async def upgrade(self, run_in_transaction: bool = True, fake: bool = False) -> list[str]: 186 | migrated = [] 187 | for version_file in Migrate.get_all_version_files(): 188 | try: 189 | exists = await Aerich.exists(version=version_file, app=self.app) 190 | except OperationalError: 191 | exists = False 192 | if not exists: 193 | app_conn_name = get_app_connection_name(self.tortoise_config, self.app) 194 | if run_in_transaction: 195 | async with in_transaction(app_conn_name) as conn: 196 | await self._upgrade(conn, version_file, fake=fake) 197 | else: 198 | app_conn = get_app_connection(self.tortoise_config, self.app) 199 | await self._upgrade(app_conn, version_file, fake=fake) 200 | migrated.append(version_file) 201 | return migrated 202 | 203 | async def downgrade(self, version: int, delete: bool, fake: bool = False) -> list[str]: 204 | ret: list[str] = [] 205 | if version == -1: 206 | specified_version = await Migrate.get_last_version() 207 | else: 208 | specified_version = await Aerich.filter( 209 | app=self.app, version__startswith=f"{version}_" 210 | ).first() 211 | if not specified_version: 212 | raise DowngradeError("No specified version found") 213 | if version == -1: 214 | versions = [specified_version] 215 | else: 216 | versions = await Aerich.filter(app=self.app, pk__gte=specified_version.pk) 217 | for version_obj in versions: 218 | file = version_obj.version 219 | async with in_transaction( 220 
| get_app_connection_name(self.tortoise_config, self.app) 221 | ) as conn: 222 | file_path = Path(Migrate.migrate_location, file) 223 | m = import_py_file(file_path) 224 | downgrade = m.downgrade 225 | downgrade_sql = await downgrade(conn) 226 | if not downgrade_sql.strip(): 227 | raise DowngradeError("No downgrade items found") 228 | if not fake: 229 | await conn.execute_script(downgrade_sql) 230 | await version_obj.delete() 231 | if delete: 232 | os.unlink(file_path) 233 | ret.append(file) 234 | return ret 235 | 236 | async def heads(self) -> list[str]: 237 | ret = [] 238 | versions = Migrate.get_all_version_files() 239 | for version in versions: 240 | if not await Aerich.exists(version=version, app=self.app): 241 | ret.append(version) 242 | return ret 243 | 244 | async def history(self) -> list[str]: 245 | versions = Migrate.get_all_version_files() 246 | return [version for version in versions] 247 | 248 | async def inspectdb(self, tables: list[str] | None = None) -> str: 249 | connection = get_app_connection(self.tortoise_config, self.app) 250 | dialect = connection.schema_generator.DIALECT 251 | if dialect == "mysql": 252 | cls: type[Inspect] = InspectMySQL 253 | elif dialect == "postgres": 254 | cls = InspectPostgres 255 | elif dialect == "sqlite": 256 | cls = InspectSQLite 257 | else: 258 | raise NotImplementedError(f"{dialect} is not supported") 259 | inspect = cls(connection, tables) 260 | return await inspect.inspect() 261 | 262 | @overload 263 | async def migrate( 264 | self, name: str = "update", empty: bool = False, no_input: Literal[True] = True 265 | ) -> str: ... 266 | 267 | @overload 268 | async def migrate( 269 | self, name: str = "update", empty: bool = False, no_input: bool = False 270 | ) -> str | None: ... 
271 | 272 | async def migrate( 273 | self, name: str = "update", empty: bool = False, no_input: bool = False 274 | ) -> str | None: 275 | # return None if same version migration file already exists, and new one not generated 276 | return await Migrate.migrate(name, empty, no_input) 277 | 278 | async def init_db(self, safe: bool) -> None: 279 | location = self.location 280 | app = self.app 281 | 282 | await Tortoise.init(config=self.tortoise_config) 283 | connection = get_app_connection(self.tortoise_config, app) 284 | 285 | dirname = Path(location, app) 286 | if not dirname.exists(): 287 | dirname.mkdir(parents=True) 288 | else: 289 | # If directory is empty, go ahead, otherwise raise FileExistsError 290 | for unexpected_file in dirname.glob("*"): 291 | raise FileExistsError(str(unexpected_file)) 292 | 293 | await generate_schema_for_client(connection, safe) 294 | 295 | schema = get_schema_sql(connection, safe) 296 | 297 | version = await Migrate.generate_version() 298 | await Aerich.create( 299 | version=version, 300 | app=app, 301 | content=get_models_describe(app), 302 | ) 303 | version_file = Path(dirname, version) 304 | content = MIGRATE_TEMPLATE.format(upgrade_sql=schema, downgrade_sql="") 305 | with open(version_file, "w", encoding="utf-8") as f: 306 | f.write(content) 307 | -------------------------------------------------------------------------------- /aerich/__main__.py: -------------------------------------------------------------------------------- 1 | from .cli import main 2 | 3 | main() 4 | -------------------------------------------------------------------------------- /aerich/_compat.py: -------------------------------------------------------------------------------- 1 | # mypy: disable-error-code="no-redef" 2 | from __future__ import annotations 3 | 4 | import sys 5 | from types import ModuleType 6 | 7 | import tortoise 8 | 9 | if sys.version_info >= (3, 11): 10 | import tomllib 11 | else: 12 | try: 13 | import tomli as tomllib 14 | except 
ImportError: 15 | import tomlkit as tomllib 16 | 17 | 18 | def imports_tomlkit() -> ModuleType: 19 | try: 20 | import tomli_w as tomlkit 21 | except ImportError: 22 | import tomlkit 23 | return tomlkit 24 | 25 | 26 | def tortoise_version_less_than(version: str) -> bool: 27 | # The min version of tortoise is '0.11.0', so we can compare it by a `<`, 28 | return tortoise.__version__ < version 29 | -------------------------------------------------------------------------------- /aerich/cli.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | from pathlib import Path 5 | from typing import cast 6 | 7 | import asyncclick as click 8 | from asyncclick import Context, UsageError 9 | from tortoise.exceptions import ConfigurationError 10 | 11 | from aerich import Command 12 | from aerich._compat import imports_tomlkit, tomllib 13 | from aerich.enums import Color 14 | from aerich.exceptions import DowngradeError 15 | from aerich.utils import add_src_path, get_tortoise_config 16 | from aerich.version import __version__ 17 | 18 | CONFIG_DEFAULT_VALUES = { 19 | "src_folder": ".", 20 | } 21 | 22 | 23 | def _patch_context_to_close_tortoise_connections_when_exit() -> None: 24 | from tortoise import Tortoise, connections 25 | 26 | origin_aexit = Context.__aexit__ 27 | 28 | async def aexit(*args, **kw) -> None: 29 | await origin_aexit(*args, **kw) 30 | if Tortoise._inited: 31 | await connections.close_all() 32 | 33 | Context.__aexit__ = aexit # type:ignore[method-assign] 34 | 35 | 36 | _patch_context_to_close_tortoise_connections_when_exit() 37 | 38 | 39 | def _check_aerich_models_included(tortoise_config: dict, e: Exception | None = None) -> None: 40 | all_models = [ 41 | m for model in tortoise_config.get("apps", {}).values() for m in model.get("models", []) 42 | ] 43 | if all_models and "aerich.models" not in all_models: 44 | raise UsageError( 45 | "You have to add 'aerich.models' in the 
@click.group(context_settings={"help_option_names": ["-h", "--help"]})
@click.version_option(__version__, "-V", "--version")
@click.option(
    "-c",
    "--config",
    default="pyproject.toml",
    show_default=True,
    help="Config file.",
)
@click.option("--app", required=False, help="Tortoise-ORM app name.")
@click.pass_context
async def cli(ctx: Context, config: str, app: str) -> None:
    """Root command group: load config and prepare a Command for subcommands.

    For every subcommand except `init`, this reads the ``[tool.aerich]`` table
    from the config file, builds a :class:`Command`, and stores it in
    ``ctx.obj["command"]``. Subcommands other than `init-db` additionally
    require an existing migrations folder and call ``command.init()``.
    """
    ctx.ensure_object(dict)
    ctx.obj["config_file"] = config

    invoked_subcommand = ctx.invoked_subcommand
    if invoked_subcommand != "init":
        config_path = Path(config)
        if not config_path.exists():
            raise UsageError(
                "You need to run `aerich init` first to create the config file.", ctx=ctx
            )
        content = config_path.read_text("utf-8")
        doc: dict = tomllib.loads(content)
        try:
            tool = cast("dict[str, str]", doc["tool"]["aerich"])
            location = tool["location"]
            tortoise_orm = tool["tortoise_orm"]
            src_folder = tool.get("src_folder", CONFIG_DEFAULT_VALUES["src_folder"])
        except KeyError as e:
            # Per the message below, older config files lack these keys and
            # the user is expected to re-run `aerich init`.
            raise UsageError(
                "You need run `aerich init` again when upgrading to aerich 0.6.0+."
            ) from e
        add_src_path(src_folder)
        tortoise_config = get_tortoise_config(ctx, tortoise_orm)
        if not app:
            # Default to the first app declared in the tortoise config.
            try:
                apps_config = cast(dict, tortoise_config["apps"])
            except KeyError:
                raise UsageError('Config must define "apps" section') from None
            app = list(apps_config.keys())[0]
        command = Command(tortoise_config=tortoise_config, app=app, location=location)
        ctx.obj["command"] = command
        if invoked_subcommand == "init-db":
            # Fail early with a helpful hint if 'aerich.models' is missing.
            _check_aerich_models_included(tortoise_config)
        else:
            if not Path(location, app).exists():
                raise UsageError(
                    "You need to run `aerich init-db` first to initialize the database.", ctx=ctx
                )
            try:
                await command.init()
            except ConfigurationError as e:
                _check_aerich_models_included(tortoise_config, e)
                raise e
async def upgrade(ctx: Context, in_transaction: bool, fake: bool) -> None:
    """Apply all unapplied migrations for the selected app.

    :param in_transaction: run migrations inside a transaction
        (forwarded to ``Command.upgrade`` as ``run_in_transaction``).
    :param fake: mark migrations as applied without executing their SQL.
    """
    command = ctx.obj["command"]
    migrated = await command.upgrade(run_in_transaction=in_transaction, fake=fake)
    if not migrated:
        return click.secho("No upgrade items found", fg=Color.yellow)
    for version_file in migrated:
        if fake:
            click.echo(f"Upgrading to {version_file}... " + click.style("FAKED", fg=Color.green))
        else:
            click.secho(f"Success upgrading to {version_file}", fg=Color.green)
" + click.style("FAKED", fg=Color.green)) 187 | else: 188 | click.secho(f"Success downgrading to {file}", fg=Color.green) 189 | 190 | 191 | @cli.command(help="Show currently available heads (unapplied migrations).") 192 | @click.pass_context 193 | async def heads(ctx: Context) -> None: 194 | command = ctx.obj["command"] 195 | head_list = await command.heads() 196 | if not head_list: 197 | return click.secho("No available heads.", fg=Color.green) 198 | for version in head_list: 199 | click.secho(version, fg=Color.green) 200 | 201 | 202 | @cli.command(help="List all migrations.") 203 | @click.pass_context 204 | async def history(ctx: Context) -> None: 205 | command = ctx.obj["command"] 206 | versions = await command.history() 207 | if not versions: 208 | return click.secho("No migrations created yet.", fg=Color.green) 209 | for version in versions: 210 | click.secho(version, fg=Color.green) 211 | 212 | 213 | def _write_config(config_path: Path, doc: dict, table: dict) -> None: 214 | tomlkit = imports_tomlkit() 215 | 216 | try: 217 | doc["tool"]["aerich"] = table 218 | except KeyError: 219 | doc["tool"] = {"aerich": table} 220 | config_path.write_text(tomlkit.dumps(doc)) 221 | 222 | 223 | @cli.command(help="Initialize aerich config and create migrations folder.") 224 | @click.option( 225 | "-t", 226 | "--tortoise-orm", 227 | required=True, 228 | help="Tortoise-ORM config dict location, like `settings.TORTOISE_ORM`.", 229 | ) 230 | @click.option( 231 | "--location", 232 | default="./migrations", 233 | show_default=True, 234 | help="Migrations folder.", 235 | ) 236 | @click.option( 237 | "-s", 238 | "--src_folder", 239 | default=CONFIG_DEFAULT_VALUES["src_folder"], 240 | show_default=False, 241 | help="Folder of the source, relative to the project root.", 242 | ) 243 | @click.pass_context 244 | async def init(ctx: Context, tortoise_orm: str, location: str, src_folder: str) -> None: 245 | config_file = ctx.obj["config_file"] 246 | 247 | if os.path.isabs(src_folder): 248 
| src_folder = os.path.relpath(os.getcwd(), src_folder) 249 | # Add ./ so it's clear that this is relative path 250 | if not src_folder.startswith("./"): 251 | src_folder = "./" + src_folder 252 | 253 | # check that we can find the configuration, if not we can fail before the config file gets created 254 | add_src_path(src_folder) 255 | get_tortoise_config(ctx, tortoise_orm) 256 | config_path = Path(config_file) 257 | content = config_path.read_text("utf-8") if config_path.exists() else "[tool.aerich]" 258 | doc: dict = tomllib.loads(content) 259 | 260 | table = {"tortoise_orm": tortoise_orm, "location": location, "src_folder": src_folder} 261 | if (aerich_config := doc.get("tool", {}).get("aerich")) and all( 262 | aerich_config.get(k) == v for k, v in table.items() 263 | ): 264 | click.echo(f"Aerich config {config_file} already inited.") 265 | else: 266 | _write_config(config_path, doc, table) 267 | click.secho(f"Success writing aerich config to {config_file}", fg=Color.green) 268 | 269 | Path(location).mkdir(parents=True, exist_ok=True) 270 | click.secho(f"Success creating migrations folder {location}", fg=Color.green) 271 | 272 | 273 | @cli.command(help="Generate schema and generate app migration folder.") 274 | @click.option( 275 | "-s", 276 | "--safe", 277 | type=bool, 278 | is_flag=True, 279 | default=True, 280 | help="Create tables only when they do not already exist.", 281 | show_default=True, 282 | ) 283 | @click.pass_context 284 | async def init_db(ctx: Context, safe: bool) -> None: 285 | command = ctx.obj["command"] 286 | app = command.app 287 | dirname = Path(command.location, app) 288 | try: 289 | await command.init_db(safe) 290 | click.secho(f"Success creating app migration folder {dirname}", fg=Color.green) 291 | click.secho(f'Success generating initial migration file for app "{app}"', fg=Color.green) 292 | except FileExistsError: 293 | return click.secho( 294 | f"App {app} is already initialized. 
def object_hook(obj: dict[str, Any]) -> Any:
    """JSON decode hook reviving dicts written by ``JsonEncoder`` for indexes.

    A dict of the shape ``{"type": "index", "val": <base64 pickle>}`` is
    unpickled back into the original object; every other dict is returned
    unchanged.
    """
    if obj.get("type") != "index":
        return obj
    encoded = obj.get("val")
    if not encoded:
        return obj
    return pickle.loads(base64.b64decode(encoded))  # nosec: B301
def encoder(obj: dict[str, Any]) -> str:
    """Serialize *obj* to JSON, using ``JsonEncoder`` to handle Index values."""
    return json.dumps(obj, cls=JsonEncoder)


def decoder(obj: str | bytes) -> Any:
    """Deserialize JSON produced by :func:`encoder`, reviving pickled indexes
    via :func:`object_hook`."""
    return json.loads(obj, object_hook=object_hook)
"{table_name}" (\n' 33 | ' "{backward_key}" {backward_type} NOT NULL REFERENCES "{backward_table}" ("{backward_field}") ON DELETE CASCADE,\n' 34 | ' "{forward_key}" {forward_type} NOT NULL REFERENCES "{forward_table}" ("{forward_field}") ON DELETE {on_delete}\n' 35 | "){extra}{comment}" 36 | ) 37 | _MODIFY_COLUMN_TEMPLATE = 'ALTER TABLE "{table_name}" MODIFY COLUMN {column}' 38 | _CHANGE_COLUMN_TEMPLATE = ( 39 | 'ALTER TABLE "{table_name}" CHANGE {old_column_name} {new_column_name} {new_column_type}' 40 | ) 41 | _RENAME_TABLE_TEMPLATE = 'ALTER TABLE "{old_table_name}" RENAME TO "{new_table_name}"' 42 | 43 | def __init__(self, client: BaseDBAsyncClient) -> None: 44 | self.client = client 45 | self.schema_generator = self.schema_generator_cls(client) 46 | 47 | @staticmethod 48 | def get_table_name(model: type[Model]) -> str: 49 | return model._meta.db_table 50 | 51 | def create_table(self, model: type[Model]) -> str: 52 | schema = self.schema_generator._get_table_sql(model, True)["table_creation_string"] 53 | if tortoise_version_less_than("0.23.1"): 54 | # Remove extra space 55 | schema = re.sub(r'(["()A-Za-z]) (["()A-Za-z])', r"\1 \2", schema) 56 | return schema.rstrip(";") 57 | 58 | def drop_table(self, table_name: str) -> str: 59 | return self._DROP_TABLE_TEMPLATE.format(table_name=table_name) 60 | 61 | def create_m2m( 62 | self, model: type[Model], field_describe: dict, reference_table_describe: dict 63 | ) -> str: 64 | through = cast(str, field_describe.get("through")) 65 | description = field_describe.get("description") 66 | pk_field = cast(dict, reference_table_describe.get("pk_field")) 67 | reference_id = pk_field.get("db_column") 68 | db_field_types = cast(dict, pk_field.get("db_field_types")) 69 | return self._M2M_TABLE_TEMPLATE.format( 70 | table_name=through, 71 | backward_table=model._meta.db_table, 72 | forward_table=reference_table_describe.get("table"), 73 | backward_field=model._meta.db_pk_column, 74 | forward_field=reference_id, 75 | 
    def _get_default(self, model: type[Model], field_describe: dict) -> Any:
        """Compute the DEFAULT clause fragment for a column.

        Returns ``None`` when the field has no default, an empty string when a
        default exists but is not expressed in DDL (UUID/Text/JSON fields,
        callable defaults, or a generator raising NotImplementedError),
        otherwise the schema generator's DEFAULT clause text.
        """
        db_table = model._meta.db_table
        default = field_describe.get("default")
        if isinstance(default, Enum):
            # Use the enum's underlying value, not the Enum instance.
            default = default.value
        db_column = cast(str, field_describe.get("db_column"))
        auto_now_add = field_describe.get("auto_now_add", False)
        auto_now = field_describe.get("auto_now", False)
        if default is not None or auto_now_add:
            if field_describe.get("field_type") in [
                "UUIDField",
                "TextField",
                "JSONField",
            ] or is_default_function(default):
                default = ""
            else:
                try:
                    default = self.schema_generator._column_default_generator(
                        db_table,
                        db_column,
                        self.schema_generator._escape_default_value(default),
                        auto_now_add,
                        auto_now,
                    )
                except NotImplementedError:
                    # Generator cannot express this default in DDL.
                    default = ""
        return default
field_describe.get("db_column")) 128 | db_field_types = cast(dict, field_describe.get("db_field_types")) 129 | default = self._get_default(model, field_describe) 130 | if default is None: 131 | default = "" 132 | if modify: 133 | unique = "" 134 | template = self._MODIFY_COLUMN_TEMPLATE 135 | else: 136 | # sqlite does not support alter table to add unique column 137 | unique = " UNIQUE" if field_describe.get("unique") and self.DIALECT != "sqlite" else "" 138 | template = self._ADD_COLUMN_TEMPLATE 139 | column = self.schema_generator._create_string( 140 | db_column=db_column, 141 | field_type=db_field_types.get(self.DIALECT, db_field_types.get("")), 142 | nullable=" NOT NULL" if not field_describe.get("nullable") else "", 143 | unique=unique, 144 | comment=( 145 | self.schema_generator._column_comment_generator( 146 | table=db_table, 147 | column=db_column, 148 | comment=description, 149 | ) 150 | if description 151 | else "" 152 | ), 153 | is_primary_key=is_pk, 154 | default=default, 155 | ) 156 | if tortoise_version_less_than("0.23.1"): 157 | column = column.replace(" ", " ") 158 | return template.format(table_name=db_table, column=column) 159 | 160 | def drop_column(self, model: type[Model], column_name: str) -> str: 161 | return self._DROP_COLUMN_TEMPLATE.format( 162 | table_name=model._meta.db_table, column_name=column_name 163 | ) 164 | 165 | def modify_column(self, model: type[Model], field_describe: dict, is_pk: bool = False) -> str: 166 | return self._add_or_modify_column(model, field_describe, is_pk, modify=True) 167 | 168 | def rename_column(self, model: type[Model], old_column_name: str, new_column_name: str) -> str: 169 | return self._RENAME_COLUMN_TEMPLATE.format( 170 | table_name=model._meta.db_table, 171 | old_column_name=old_column_name, 172 | new_column_name=new_column_name, 173 | ) 174 | 175 | def change_column( 176 | self, model: type[Model], old_column_name: str, new_column_name: str, new_column_type: str 177 | ) -> str: 178 | return 
self._CHANGE_COLUMN_TEMPLATE.format( 179 | table_name=model._meta.db_table, 180 | old_column_name=old_column_name, 181 | new_column_name=new_column_name, 182 | new_column_type=new_column_type, 183 | ) 184 | 185 | def _index_name(self, unique: bool | None, model: type[Model], field_names: list[str]) -> str: 186 | func_name = "_get_index_name" 187 | if not hasattr(self.schema_generator, func_name): 188 | # For tortoise-orm<0.24.1 189 | func_name = "_generate_index_name" 190 | return getattr(self.schema_generator, func_name)( 191 | "idx" if not unique else "uid", model, field_names 192 | ) 193 | 194 | def add_index( 195 | self, 196 | model: type[Model], 197 | field_names: list[str], 198 | unique: bool | None = False, 199 | name: str | None = None, 200 | index_type: str = "", 201 | extra: str | None = "", 202 | ) -> str: 203 | return self._ADD_INDEX_TEMPLATE.format( 204 | unique="UNIQUE " if unique else "", 205 | index_name=name or self._index_name(unique, model, field_names), 206 | table_name=model._meta.db_table, 207 | column_names=", ".join(self.schema_generator.quote(f) for f in field_names), 208 | index_type=f"{index_type} " if index_type else "", 209 | extra=f"{extra}" if extra else "", 210 | ) 211 | 212 | def drop_index( 213 | self, 214 | model: type[Model], 215 | field_names: list[str], 216 | unique: bool | None = False, 217 | name: str | None = None, 218 | ) -> str: 219 | return self._DROP_INDEX_TEMPLATE.format( 220 | index_name=name or self._index_name(unique, model, field_names), 221 | table_name=model._meta.db_table, 222 | ) 223 | 224 | def drop_index_by_name(self, model: type[Model], index_name: str) -> str: 225 | return self.drop_index(model, [], name=index_name) 226 | 227 | def drop_unique_constraint(self, model: type[Model], name: str) -> str: 228 | return self._DROP_CONSTRAINT_TEMPLATE.format( 229 | table_name=model._meta.db_table, 230 | name=name, 231 | ) 232 | 233 | def _generate_fk_name( 234 | self, db_table: str, field_describe: dict, 
    def alter_column_default(self, model: type[Model], field_describe: dict) -> str:
        """Build an ALTER COLUMN statement that sets or drops a column default.

        ``_get_default`` returning ``None`` means "no default" and yields
        ``DROP DEFAULT``; any other value is appended after ``SET``.
        """
        db_table = model._meta.db_table
        default = self._get_default(model, field_describe)
        return self._ALTER_DEFAULT_TEMPLATE.format(
            table_name=db_table,
            column=field_describe.get("db_column"),
            # NOTE(review): _get_default may return "" (e.g. Text/JSON fields),
            # which produces a bare "SET" here instead of "DROP DEFAULT" —
            # confirm whether empty-string defaults should fall through to DROP.
            default="SET" + default if default is not None else "DROP DEFAULT",
        )
    def _index_name(self, unique: bool | None, model: type[Model], field_names: list[str]) -> str:
        """Return the index name used for *field_names* on MySQL.

        For a single-column unique constraint the key name equals the column
        name (matching the schema tortoise generates, per the example below);
        everything else falls back to the base generator's naming.
        """
        if unique and len(field_names) == 1:
            # Example: `email = CharField(max_length=50, unique=True)`
            # Generate schema: `"email" VARCHAR(10) NOT NULL UNIQUE`
            # Unique index key is the same as field name: `email`
            return field_names[0]
        return super()._index_name(unique, model, field_names)
self._ALTER_NULL_TEMPLATE.format( 26 | table_name=db_table, 27 | column=field_describe.get("db_column"), 28 | set_drop="DROP" if field_describe.get("nullable") else "SET", 29 | ) 30 | 31 | def modify_column(self, model: type[Model], field_describe: dict, is_pk: bool = False) -> str: 32 | db_table = model._meta.db_table 33 | db_field_types = cast(dict, field_describe.get("db_field_types")) 34 | db_column = field_describe.get("db_column") 35 | datatype = db_field_types.get(self.DIALECT) or db_field_types.get("") 36 | return self._MODIFY_COLUMN_TEMPLATE.format( 37 | table_name=db_table, 38 | column=db_column, 39 | datatype=datatype, 40 | using=f' USING "{db_column}"::{datatype}', 41 | ) 42 | 43 | def set_comment(self, model: type[Model], field_describe: dict) -> str: 44 | db_table = model._meta.db_table 45 | return self._SET_COMMENT_TEMPLATE.format( 46 | table_name=db_table, 47 | column=field_describe.get("db_column") or field_describe.get("raw_field"), 48 | comment=( 49 | "'{}'".format(field_describe.get("description")) 50 | if field_describe.get("description") 51 | else "NULL" 52 | ), 53 | ) 54 | -------------------------------------------------------------------------------- /aerich/ddl/sqlite/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from tortoise import Model 4 | from tortoise.backends.sqlite.schema_generator import SqliteSchemaGenerator 5 | 6 | from aerich.ddl import BaseDDL 7 | from aerich.exceptions import NotSupportError 8 | 9 | 10 | class SqliteDDL(BaseDDL): 11 | schema_generator_cls = SqliteSchemaGenerator 12 | DIALECT = SqliteSchemaGenerator.DIALECT 13 | _ADD_INDEX_TEMPLATE = 'CREATE {unique}INDEX "{index_name}" ON "{table_name}" ({column_names})' 14 | _DROP_INDEX_TEMPLATE = 'DROP INDEX IF EXISTS "{index_name}"' 15 | 16 | def modify_column(self, model: type[Model], field_object: dict, is_pk: bool = True): 17 | raise NotSupportError("Modify column is 
class Color(str, Enum):
    """Color names passed as ``fg`` to click's secho/style output helpers."""

    green = "green"
    red = "red"
    yellow = "yellow"
None 33 | extra: str | None = None 34 | decimal_places: int | None = None 35 | max_digits: int | None = None 36 | 37 | def translate(self) -> ColumnInfoDict: 38 | comment = default = length = index = null = pk = "" 39 | if self.pk: 40 | pk = "primary_key=True, " 41 | else: 42 | if self.unique: 43 | index = "unique=True, " 44 | elif self.index: 45 | index = "db_index=True, " 46 | if self.data_type in ("varchar", "VARCHAR"): 47 | length = f"max_length={self.length}, " 48 | elif self.data_type in ("decimal", "numeric"): 49 | length_parts = [] 50 | if self.max_digits: 51 | length_parts.append(f"max_digits={self.max_digits}") 52 | if self.decimal_places: 53 | length_parts.append(f"decimal_places={self.decimal_places}") 54 | if length_parts: 55 | length = ", ".join(length_parts) + ", " 56 | if self.null: 57 | null = "null=True, " 58 | if self.default is not None and not self.pk: 59 | if self.data_type in ("tinyint", "INT"): 60 | default = f"default={'True' if self.default == '1' else 'False'}, " 61 | elif self.data_type == "bool": 62 | default = f"default={'True' if self.default == 'true' else 'False'}, " 63 | elif self.data_type in ("datetime", "timestamptz", "TIMESTAMP"): 64 | if self.default == "CURRENT_TIMESTAMP": 65 | if self.extra == "DEFAULT_GENERATED on update CURRENT_TIMESTAMP": 66 | default = "auto_now=True, " 67 | else: 68 | default = "auto_now_add=True, " 69 | else: 70 | if "::" in self.default: 71 | default = f"default={self.default.split('::')[0]}, " 72 | elif self.default.endswith("()"): 73 | default = "" 74 | elif self.default == "": 75 | default = 'default=""' 76 | else: 77 | default = f"default={self.default}, " 78 | 79 | if self.comment: 80 | comment = f"description='{self.comment}', " 81 | return { 82 | "name": self.name, 83 | "pk": pk, 84 | "index": index, 85 | "null": null, 86 | "default": default, 87 | "length": length, 88 | "comment": comment, 89 | } 90 | 91 | 92 | class Inspect: 93 | _table_template = "class {table}(Model):\n" 94 | 95 | def 
__init__(self, conn: BaseDBAsyncClient, tables: list[str] | None = None) -> None: 96 | self.conn = conn 97 | with contextlib.suppress(AttributeError): 98 | self.database = conn.database # type:ignore[attr-defined] 99 | self.tables = tables 100 | 101 | @property 102 | def field_map(self) -> FieldMapDict: 103 | raise NotImplementedError 104 | 105 | async def inspect(self) -> str: 106 | if not self.tables: 107 | self.tables = await self.get_all_tables() 108 | result = "from tortoise import Model, fields\n\n\n" 109 | tables = [] 110 | for table in self.tables: 111 | columns = await self.get_columns(table) 112 | fields = [] 113 | model = self._table_template.format(table=table.title().replace("_", "")) 114 | for column in columns: 115 | field = self.field_map[column.data_type](**column.translate()) 116 | fields.append(" " + field) 117 | tables.append(model + "\n".join(fields)) 118 | return result + "\n\n\n".join(tables) 119 | 120 | async def get_columns(self, table: str) -> list[Column]: 121 | raise NotImplementedError 122 | 123 | async def get_all_tables(self) -> list[str]: 124 | raise NotImplementedError 125 | 126 | @staticmethod 127 | def get_field_string( 128 | field_class: str, arguments: str = "{null}{default}{comment}", **kwargs 129 | ) -> str: 130 | name = kwargs["name"] 131 | field_params = arguments.format(**kwargs).strip().rstrip(",") 132 | return f"{name} = fields.{field_class}({field_params})" 133 | 134 | @classmethod 135 | def decimal_field(cls, **kwargs) -> str: 136 | return cls.get_field_string("DecimalField", **kwargs) 137 | 138 | @classmethod 139 | def time_field(cls, **kwargs) -> str: 140 | return cls.get_field_string("TimeField", **kwargs) 141 | 142 | @classmethod 143 | def date_field(cls, **kwargs) -> str: 144 | return cls.get_field_string("DateField", **kwargs) 145 | 146 | @classmethod 147 | def float_field(cls, **kwargs) -> str: 148 | return cls.get_field_string("FloatField", **kwargs) 149 | 150 | @classmethod 151 | def datetime_field(cls, 
class InspectMySQL(Inspect):
    """Inspect an existing MySQL database and build tortoise model source."""

    @property
    def field_map(self) -> FieldMapDict:
        """MySQL DATA_TYPE name -> field-string builder."""
        return {
            "int": self.int_field,
            "smallint": self.smallint_field,
            "tinyint": self.bool_field,
            "bigint": self.bigint_field,
            "varchar": self.char_field,
            "char": self.uuid_field,
            "longtext": self.text_field,
            "text": self.text_field,
            "datetime": self.datetime_field,
            "float": self.float_field,
            "double": self.float_field,
            "date": self.date_field,
            "time": self.time_field,
            "decimal": self.decimal_field,
            "json": self.json_field,
            "longblob": self.binary_field,
        }

    async def get_all_tables(self) -> list[str]:
        """List table names of the current database from information_schema."""
        sql = "select TABLE_NAME from information_schema.TABLES where TABLE_SCHEMA=%s"
        rows = await self.conn.execute_query_dict(sql, [self.database])
        return [row["TABLE_NAME"] for row in rows]

    async def get_columns(self, table: str) -> list[Column]:
        """Describe every column of *table*, including index/unique info."""
        sql = """select c.*, s.NON_UNIQUE, s.INDEX_NAME
from information_schema.COLUMNS c
left join information_schema.STATISTICS s on c.TABLE_NAME = s.TABLE_NAME
and c.TABLE_SCHEMA = s.TABLE_SCHEMA
and c.COLUMN_NAME = s.COLUMN_NAME
where c.TABLE_SCHEMA = %s
and c.TABLE_NAME = %s"""
        rows = await self.conn.execute_query_dict(sql, [self.database, table])
        result: list[Column] = []
        for row in rows:
            # Prefer the STATISTICS row when the column is part of an index;
            # otherwise fall back to COLUMN_KEY for uniqueness.
            non_unique = row["NON_UNIQUE"]
            if non_unique is not None:
                unique = not non_unique
            else:
                unique = row["COLUMN_KEY"] == "UNI"
            index_name = row["INDEX_NAME"]
            index = False if index_name is None else index_name != "PRIMARY"
            result.append(
                Column(
                    name=row["COLUMN_NAME"],
                    data_type=row["DATA_TYPE"],
                    null=row["IS_NULLABLE"] == "YES",
                    default=row["COLUMN_DEFAULT"],
                    pk=row["COLUMN_KEY"] == "PRI",
                    comment=row["COLUMN_COMMENT"],
                    unique=unique,
                    extra=row["EXTRA"],
                    index=index,
                    length=row["CHARACTER_MAXIMUM_LENGTH"],
                    max_digits=row["NUMERIC_PRECISION"],
                    decimal_places=row["NUMERIC_SCALE"],
                )
            )
        return result
class InspectPostgres(Inspect):
    """Inspect an existing PostgreSQL database and build tortoise model source."""

    def __init__(self, conn: BasePostgresClient, tables: list[str] | None = None) -> None:
        super().__init__(conn, tables)
        # Fall back to the default "public" schema when the connection has none configured.
        self.schema = conn.server_settings.get("schema") or "public"

    @property
    def field_map(self) -> FieldMapDict:
        # Postgres udt_name -> field-string builder.
        return {
            "int2": self.smallint_field,
            "int4": self.int_field,
            "int8": self.bigint_field,
            "smallint": self.smallint_field,
            "bigint": self.bigint_field,
            "varchar": self.char_field,
            "text": self.text_field,
            "timestamptz": self.datetime_field,
            "float4": self.float_field,
            "float8": self.float_field,
            "date": self.date_field,
            "time": self.time_field,
            "decimal": self.decimal_field,
            "numeric": self.decimal_field,
            "uuid": self.uuid_field,
            "jsonb": self.json_field,
            "bytea": self.binary_field,
            "bool": self.bool_field,
            "timestamp": self.datetime_field,
        }

    async def get_all_tables(self) -> list[str]:
        # Scoped to both the current catalog (database) and the configured schema.
        sql = "select TABLE_NAME from information_schema.TABLES where table_catalog=$1 and table_schema=$2"
        ret = await self.conn.execute_query_dict(sql, [self.database, self.schema])
        return list(map(lambda x: x["table_name"], ret))

    async def get_columns(self, table: str) -> list[Column]:
        """Describe every column of *table* from information_schema.

        Joins constraint usage onto the columns so primary-key membership can
        be read from ``constraint_type``; comments come from ``col_description``.
        """
        columns = []
        # NOTE(review): col_description hardcodes the 'public' schema rather than
        # using self.schema — confirm behavior for tables in non-public schemas.
        sql = f"""select c.column_name,
col_description('public.{table}'::regclass, ordinal_position) as column_comment,
t.constraint_type as column_key,
udt_name as data_type,
is_nullable,
column_default,
character_maximum_length,
numeric_precision,
numeric_scale
from information_schema.constraint_column_usage const
join information_schema.table_constraints t
using (table_catalog, table_schema, table_name, constraint_catalog, constraint_schema, constraint_name)
right join information_schema.columns c using (column_name, table_catalog, table_schema, table_name)
where c.table_catalog = $1
and c.table_name = $2
and c.table_schema = $3"""  # nosec:B608
        # asyncpg uses $n placeholders; psycopg expects %s, so rewrite them.
        if "psycopg" in str(type(self.conn)).lower():
            sql = re.sub(r"\$[123]", "%s", sql)
        ret = await self.conn.execute_query_dict(sql, [self.database, table, self.schema])
        for row in ret:
            columns.append(
                Column(
                    name=row["column_name"],
                    data_type=row["data_type"],
                    null=row["is_nullable"] == "YES",
                    default=row["column_default"],
                    length=row["character_maximum_length"],
                    max_digits=row["numeric_precision"],
                    decimal_places=row["numeric_scale"],
                    comment=row["column_comment"],
                    pk=row["column_key"] == "PRIMARY KEY",
                    unique=False,  # can't get this simply
                    index=False,  # can't get this simply
                )
            )
        return columns
columns = [] 26 | sql = f"PRAGMA table_info({table})" 27 | ret = await self.conn.execute_query_dict(sql) 28 | columns_index = await self._get_columns_index(table) 29 | for row in ret: 30 | try: 31 | length = row["type"].split("(")[1].split(")")[0] 32 | except IndexError: 33 | length = None 34 | columns.append( 35 | Column( 36 | name=row["name"], 37 | data_type=row["type"].split("(")[0], 38 | null=row["notnull"] == 0, 39 | default=row["dflt_value"], 40 | length=length, 41 | pk=row["pk"] == 1, 42 | unique=columns_index.get(row["name"]) == "unique", 43 | index=columns_index.get(row["name"]) == "index", 44 | ) 45 | ) 46 | return columns 47 | 48 | async def _get_columns_index(self, table: str) -> dict[str, str]: 49 | sql = f"PRAGMA index_list ({table})" 50 | indexes = await self.conn.execute_query_dict(sql) 51 | ret = {} 52 | for index in indexes: 53 | sql = f"PRAGMA index_info({index['name']})" 54 | index_info = (await self.conn.execute_query_dict(sql))[0] 55 | ret[index_info["name"]] = "unique" if index["unique"] else "index" 56 | return ret 57 | 58 | async def get_all_tables(self) -> list[str]: 59 | sql = "select tbl_name from sqlite_master where type='table' and name!='sqlite_sequence'" 60 | ret = await self.conn.execute_query_dict(sql) 61 | return list(map(lambda x: x["tbl_name"], ret)) 62 | -------------------------------------------------------------------------------- /aerich/models.py: -------------------------------------------------------------------------------- 1 | from tortoise import Model, fields 2 | 3 | from aerich.coder import decoder, encoder 4 | 5 | MAX_VERSION_LENGTH = 255 6 | MAX_APP_LENGTH = 100 7 | 8 | 9 | class Aerich(Model): 10 | version = fields.CharField(max_length=MAX_VERSION_LENGTH) 11 | app = fields.CharField(max_length=MAX_APP_LENGTH) 12 | content: dict = fields.JSONField(encoder=encoder, decoder=decoder) 13 | 14 | class Meta: 15 | ordering = ["-id"] 16 | -------------------------------------------------------------------------------- 
def add_src_path(path: str) -> str:
    """Ensure *path* is importable by putting it on ``sys.path``.

    :param path: folder to add; relative paths are made absolute first so
        that things depending on the real location (e.g. ``__file__``) work.
    :return: the absolute path that was added
    :raises ClickException: if the folder does not exist
    """
    abs_path = path if os.path.isabs(path) else os.path.abspath(path)
    if not os.path.isdir(abs_path):
        raise ClickException(f"Specified source folder does not exist: {abs_path}")
    if abs_path not in sys.path:
        sys.path.insert(0, abs_path)
    return abs_path
def is_default_function(string: str) -> re.Match | None:
    """Detect whether a field's default value is the repr of a Python callable.

    A callable default (e.g. ``datetime.now``) stringifies to something like
    ``<function datetime.now at 0x7f...>``; such defaults cannot be rendered
    as SQL literals, so callers use this check to treat them specially.

    :param string: the default value (any object; ``None`` is coerced to "").
    :return: the truthy ``re.Match`` for a function repr, otherwise ``None``.
    """
    # BUG FIX: the previous pattern r"^$" only matched the empty string, so the
    # check was truthy for None/"" and falsy for actual function reprs.
    return re.match(r"^<function.+>$", str(string or ""))
def run_async(
    async_func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
    *args: Unpack[PosArgsT],
) -> T_Retval:
    """Execute *async_func* on a worker-thread portal and return its result.

    ``asyncio.run(async_func(...))`` would also yield the result, but it tears
    the running loop down; the blocking portal leaves the caller's loop alone.
    """
    outcome: list[T_Retval] = []

    async def _capture() -> None:
        outcome.append(await async_func(*args))

    with from_thread.start_blocking_portal() as portal:
        portal.call(_capture)

    return outcome[0]
@pytest.fixture(scope="session")
def event_loop() -> Generator:
    """Provide one session-wide event loop shared by all async tests.

    ``close`` is stubbed out so nothing can close the loop mid-session
    (presumably guarding against pytest-asyncio's per-test loop teardown —
    confirm); the saved original is invoked once at session teardown.
    """
    policy = asyncio.get_event_loop_policy()
    res = policy.new_event_loop()
    asyncio.set_event_loop(res)
    # Keep a handle to the real close() and neuter the public one.
    res._close = res.close  # type:ignore[attr-defined]
    res.close = lambda: None  # type:ignore[method-assign]

    yield res

    # Session teardown: actually close the loop.
    res._close()  # type:ignore[attr-defined]
@pytest.fixture
def new_aerich_project(tmp_path: Path):
    """Yield inside a temporary aerich-managed tortoise project.

    The project is assembled in ``tmp_path`` from the ``tests/assets/fake/``
    assets together with the shared ``tests/models.py``.
    """
    asset_dir = TEST_DIR / "assets" / "fake"
    with _new_aerich_project(tmp_path, asset_dir, TEST_DIR / "models.py"):
        yield
authors = [{name="long2ice", email="long2ice@gmail.com"}]
63 | 64 | [build-system] 65 | requires = ["poetry-core>=2.0.0"] 66 | build-backend = "poetry.core.masonry.api" 67 | 68 | [tool.pytest.ini_options] 69 | asyncio_mode = 'auto' 70 | 71 | [tool.coverage.run] 72 | branch = true 73 | source = ["aerich"] 74 | 75 | [tool.coverage.report] 76 | exclude_also = [ 77 | "if TYPE_CHECKING:" 78 | ] 79 | 80 | [tool.mypy] 81 | pretty = true 82 | python_version = "3.9" 83 | check_untyped_defs = true 84 | warn_unused_ignores = true 85 | disallow_incomplete_defs = false 86 | exclude = ["tests/assets", "migrations"] 87 | 88 | [[tool.mypy.overrides]] 89 | module = [ 90 | 'dictdiffer.*', 91 | 'tomlkit', 92 | 'tomli_w', 93 | 'tomli', 94 | ] 95 | ignore_missing_imports = true 96 | 97 | [tool.ruff] 98 | line-length = 100 99 | 100 | [tool.ruff.lint] 101 | extend-select = [ 102 | "I", # https://docs.astral.sh/ruff/rules/#isort-i 103 | "SIM", # https://docs.astral.sh/ruff/rules/#flake8-simplify-sim 104 | "FA", # https://docs.astral.sh/ruff/rules/#flake8-future-annotations-fa 105 | "UP", # https://docs.astral.sh/ruff/rules/#pyupgrade-up 106 | "RUF100", # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf 107 | ] 108 | ignore = ["UP031"] # https://docs.astral.sh/ruff/rules/printf-string-formatting/ 109 | 110 | [tool.ruff.lint.per-file-ignores] 111 | "aerich/_compat.py" = ["F401"] 112 | 113 | [tool.bandit] 114 | exclude_dirs = ["tests", "conftest.py"] 115 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tortoise/aerich/3fc3945a1ec527784c8d9056923404dc7e026122/tests/__init__.py -------------------------------------------------------------------------------- /tests/_utils.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | import os 3 | import platform 4 | import shlex 5 | import shutil 6 | import subprocess 7 | 
def copy_files(*src_files: Path, target_dir: Path) -> None:
    """Copy each given file into *target_dir*, keeping the original file names."""
    for source in src_files:
        shutil.copy(source, target_dir)
"sqlite" in cls.test_db_url 74 | 75 | 76 | WINDOWS = platform.system() == "Windows" 77 | 78 | 79 | def run_shell(command: str, capture_output=True, **kw) -> str: 80 | if WINDOWS and command.startswith("aerich "): 81 | command = "python -m " + command 82 | r = subprocess.run(shlex.split(command), capture_output=capture_output) 83 | if r.returncode != 0 and r.stderr: 84 | return r.stderr.decode() 85 | if not r.stdout: 86 | return "" 87 | return r.stdout.decode() 88 | -------------------------------------------------------------------------------- /tests/assets/fake/_tests.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from models import NewModel 3 | from models_second import Config 4 | from settings import TORTOISE_ORM 5 | from tortoise import Tortoise 6 | from tortoise.exceptions import OperationalError 7 | 8 | try: 9 | # This error does not translate to tortoise's OperationalError 10 | from psycopg.errors import UndefinedColumn 11 | except ImportError: 12 | errors = (OperationalError,) 13 | else: 14 | errors = (OperationalError, UndefinedColumn) 15 | 16 | 17 | @pytest.fixture(scope="session") 18 | def anyio_backend() -> str: 19 | return "asyncio" 20 | 21 | 22 | @pytest.fixture(autouse=True) 23 | async def init_connections(): 24 | await Tortoise.init(TORTOISE_ORM) 25 | try: 26 | yield 27 | finally: 28 | await Tortoise.close_connections() 29 | 30 | 31 | @pytest.mark.anyio 32 | async def test_init_db(): 33 | m1 = await NewModel.filter(name="") 34 | assert isinstance(m1, list) 35 | m2 = await Config.filter(key="") 36 | assert isinstance(m2, list) 37 | await NewModel.create(name="") 38 | await Config.create(key="", label="", value={}) 39 | 40 | 41 | @pytest.mark.anyio 42 | async def test_fake_field_1(): 43 | assert "field_1" in NewModel._meta.fields_map 44 | assert "field_1" in Config._meta.fields_map 45 | with pytest.raises(errors): 46 | await NewModel.create(name="", field_1=1) 47 | with 
@pytest.mark.anyio
async def test_fake_field_2():
    """The ORM still knows ``field_2`` while the db column is absent/unusable.

    After the fake-migration scenario driving this asset file, plain creates
    must fail with a database-level error — presumably because ``field_2``
    exists only in the model state, not in the actual schema (confirm against
    the fake-migrate driver in tests/test_fake.py).
    """
    assert "field_2" in NewModel._meta.fields_map
    assert "field_2" in Config._meta.fields_map
    with pytest.raises(errors):
        await NewModel.create(name="")
    with pytest.raises(errors):
        await Config.create(key="", label="", value={})
@pytest.mark.anyio
async def test_migrate():
    """Exercise `aerich migrate` prompting when same-version files exist.

    Covers three runs: abort on default input, delete-and-regenerate on an
    explicit truthy input, and silent regeneration with ``--no-input``.
    """
    runner = CliRunner()
    # Default to abort without deleting previous generated migration files
    result = await runner.invoke(cli, ["migrate"], input="\n")
    assert not result.exception
    # "it" — singular prompt wording when only one conflicting file exists.
    assert "it" in result.output
    # NOTE(review): "avaliable" is misspelled but must match the CLI's own
    # output string — keep in sync with aerich rather than fixing here.
    warning_msg = (
        "Aborted! You may need to run `aerich heads` to list avaliable unapplied migrations."
    )
    assert warning_msg in result.output
    migrate_dir = Path(Migrate.migrate_location)
    # Fabricate an extra file with the same version number ("1_") to force
    # the plural prompt path on the next run.
    extra_migration_file = migrate_dir.joinpath("1_datetime_update.py")
    extra_migration_file.touch()
    pre_migration_files = list(migrate_dir.glob("1_*.py"))
    updated_at_0 = pre_migration_files[0].stat().st_mtime
    # Delete migration files that with same version num when explicit input True
    result = await runner.invoke(cli, ["migrate"], input="True\n")
    assert not result.exception
    # "them" — plural wording, and every conflicting filename is reported.
    assert "them" in result.output
    assert all(i.name in result.output for i in pre_migration_files)
    assert not extra_migration_file.exists()
    new_migration_files = list(migrate_dir.glob("1_*.py"))
    assert len(new_migration_files) == 1
    # mtime increases => the migration file was regenerated, not kept.
    updated_at = new_migration_files[0].stat().st_mtime
    assert updated_at > updated_at_0
    # Delete migration files without ask for prompt when --no-input passed
    result = await runner.invoke(cli, ["migrate", "--no-input"])
    assert not result.exception
    assert "them" not in result.output and "it" not in result.output
    latest_migration_files = list(migrate_dir.glob("1_*.py"))
    assert len(latest_migration_files) == 1
    updated_at_2 = latest_migration_files[0].stat().st_mtime
    assert updated_at_2 > updated_at
import os
from datetime import date

from tortoise.contrib.test import MEMORY_SQLITE

# TEST_DB may contain a literal "\{\}" placeholder that is substituted with a
# scenario-specific, date-stamped database name; without TEST_DB we fall back
# to an in-memory sqlite url.
DB_URL = (
    _u.replace("\\{\\}", f"aerich_missing_models_{date.today():%Y%m%d}")
    if (_u := os.getenv("TEST_DB"))
    else MEMORY_SQLITE
)

# Normal config: includes "aerich.models" so aerich can track migrations.
TORTOISE_ORM = {
    "connections": {
        "default": DB_URL.replace(MEMORY_SQLITE, "sqlite://db.sqlite3"),
    },
    "apps": {"models": {"models": ["models", "aerich.models"]}},
}
# Same config with "aerich.models" deliberately omitted — used by
# tests/test_cli.py to assert aerich's "You have to add 'aerich.models'" error.
TORTOISE_ORM_NO_AERICH_MODELS = {
    **TORTOISE_ORM,
    "apps": {
        "models": {"models": ["models"]},
    },
}
@pytest.mark.anyio
async def test_init_db():
    """Initial state (models.py): Foo unique_together ("a","b") plus unique c;
    Sth unique_together [("a","b"), ("c","d")]."""
    await Foo.create(a=1, b=1, c=1)
    with pytest.raises(OperationalError):
        # duplicate (a, b)
        await Foo.create(a=1, b=1, c=2)
    with pytest.raises(OperationalError):
        # duplicate unique column c
        await Foo.create(a=1, b=2, c=1)
    with pytest.raises(OperationalError):
        # first create succeeds; the identical second one violates the
        # unique_together constraints and triggers the expected error
        await Sth.create(a=1, b=1, c=1, d=1)
        await Sth.create(a=1, b=1, c=1, d=1)


@pytest.mark.anyio
async def test_models_2():
    """After migrating to models_2.py: Foo's (a, b) constraint was removed,
    Sth keeps only ("a", "b")."""
    await Foo.create(a=2, b=2, c=2)
    await Foo.create(a=2, b=2, c=3)
    with pytest.raises(OperationalError):
        # c is still unique
        await Foo.create(a=2, b=2, c=3)
    await Sth.create(a=2, b=2, c=2, d=2)
    # (c, d) constraint is gone, so duplicating (c, d) is fine now
    await Sth.create(a=3, b=2, c=2, d=2)
    with pytest.raises(OperationalError):
        # but (a, b) is still enforced
        await Sth.create(a=3, b=2, c=2, d=2)


@pytest.mark.anyio
async def test_models_3():
    """After models_3.py: Sth gained nullable e/f with unique_together ("e", "f")."""
    await Sth.create(a=3, b=3, c=3, d=3, e=3, f=3)
    with pytest.raises(OperationalError):
        # duplicate (a, b)
        await Sth.create(a=3, b=3, c=3, d=3, e=3, f=4)
    with pytest.raises(OperationalError):
        # duplicate (e, f)
        await Sth.create(a=3, b=4, c=3, d=3, e=3, f=3)


@pytest.mark.anyio
async def test_models_4():
    """After models_4.py: new model New with unique_together ("a", "b")."""
    # imported lazily: New only exists once models_4 has been copied in place
    from models import New

    await New.create(a=1, b=1)
    with pytest.raises(OperationalError):
        await New.create(a=1, b=1)


@pytest.mark.anyio
async def test_models_5():
    """After models_5.py: New's fields were renamed to a2/b2 with the
    unique_together constraint following the rename."""
    from models import New

    await New.create(a2=2, b2=2)
    with pytest.raises(OperationalError):
        await New.create(a2=2, b2=2)
@cli.command()
async def create():
    """Create the test database(s); schemas are generated later by aerich."""
    await init_db(TORTOISE_ORM, generate_schemas=False)
    connections = TORTOISE_ORM["connections"]
    click.echo(f"Success to create databases for {connections}")


@cli.command()
async def drop():
    """Drop the test database(s), ignoring connection errors."""
    await drop_db(TORTOISE_ORM)
    connections = TORTOISE_ORM["connections"]
    click.echo(f"Dropped databases for {connections}")


def main():
    cli()


if __name__ == "__main__":
    main()
# Stage 4 of the remove-constraint migration scenario: identical to
# models_3.py, plus a brand-new model ``New`` with its own unique_together,
# exercised by test_models_4 in _tests.py.
from tortoise import Model, fields


class Foo(Model):
    a = fields.IntField()
    b = fields.IntField()
    c = fields.IntField(unique=True)


class Sth(Model):
    a = fields.IntField()
    b = fields.IntField()
    c = fields.IntField()
    d = fields.IntField()
    e = fields.IntField(null=True)
    f = fields.IntField(null=True)

    class Meta:
        unique_together = [("a", "b"), ("e", "f")]


class New(Model):
    a = fields.IntField()
    b = fields.IntField()

    class Meta:
        unique_together = [("a", "b")]
# Asset tests for the sqlite migration scenario — presumably executed
# repeatedly by tests/test_sqlite_migrate.py at matching schema states
# (confirm against that driver).
import uuid

import pytest
from models import Foo
from tortoise.exceptions import IntegrityError


@pytest.mark.asyncio
async def test_allow_duplicate() -> None:
    """With no unique constraint on ``name``, duplicates are allowed."""
    await Foo.all().delete()
    await Foo.create(name="foo")
    obj = await Foo.create(name="foo")
    assert (await Foo.all().count()) == 2
    await obj.delete()


@pytest.mark.asyncio
async def test_unique_is_true() -> None:
    """After migrating ``name`` to unique=True, duplicates must fail."""
    with pytest.raises(IntegrityError):
        await Foo.create(name="foo")
        await Foo.create(name="foo")


@pytest.mark.asyncio
async def test_add_unique_field() -> None:
    """A newly added unique ``age`` field rejects duplicate values."""
    if not await Foo.filter(age=0).exists():
        await Foo.create(name="0_" + uuid.uuid4().hex, age=0)
    with pytest.raises(IntegrityError):
        await Foo.create(name=uuid.uuid4().hex, age=0)


@pytest.mark.asyncio
async def test_drop_unique_field() -> None:
    """After dropping the unique constraint, a second age=0 row is accepted."""
    name = "1_" + uuid.uuid4().hex
    await Foo.create(name=name, age=0)
    assert await Foo.filter(name=name).exists()


@pytest.mark.asyncio
async def test_with_age_field() -> None:
    """``age`` column exists and round-trips its value."""
    name = "2_" + uuid.uuid4().hex
    await Foo.create(name=name, age=0)
    obj = await Foo.get(name=name)
    assert obj.age == 0


@pytest.mark.asyncio
async def test_without_age_field() -> None:
    """After ``age`` is removed from the model, the attribute is gone even if
    a value was passed to create()."""
    name = "3_" + uuid.uuid4().hex
    await Foo.create(name=name, age=0)
    obj = await Foo.get(name=name)
    assert getattr(obj, "age", None) is None


@pytest.mark.asyncio
async def test_m2m_with_custom_through() -> None:
    """M2M with an explicit through model; ``is_active`` defaults falsy."""
    from models import FooGroup, Group

    name = "4_" + uuid.uuid4().hex
    foo = await Foo.create(name=name)
    group = await Group.create(name=name + "1")
    await FooGroup.all().delete()
    await foo.groups.add(group)
    foo_group = await FooGroup.get(foo=foo, group=group)
    assert not foo_group.is_active


@pytest.mark.asyncio
async def test_add_m2m_field_after_init_db() -> None:
    """An M2M field added after init-db works in both directions."""
    from models import Group

    name = "5_" + uuid.uuid4().hex
    foo = await Foo.create(name=name)
    group = await Group.create(name=name + "1")
    await foo.groups.add(group)
    assert (await group.users.all().first()) == foo
class CustomIndex(Index):
    """Index subclass carrying an extra private attribute.

    Used by the test models — presumably to verify that user-defined Index
    subclasses with extra instance state survive aerich's index diffing and
    serialization (confirm against tests/test_migrate.py).
    """

    def __init__(self, *args, **kw) -> None:
        super().__init__(*args, **kw)
        # extra non-Index attribute; value is irrelevant, presence is the point
        self._foo = ""
class PermissionAction(IntEnum):
    create = 1
    delete = 2
    update = 3
    read = 4


class Status(IntEnum):
    on = 1
    off = 0


class User(Model):
    # "new" schema counterpart of tests/old_models.py User: username is now
    # unique, avatar was dropped, and longitude shrank to (10, 8).
    username = fields.CharField(max_length=20, unique=True)
    password = fields.CharField(max_length=100)
    last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
    is_active = fields.BooleanField(default=True, description="Is Active")
    is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
    intro = fields.TextField(default="")
    longitude = fields.DecimalField(max_digits=10, decimal_places=8)

    products: fields.ManyToManyRelation[Product]

    class Meta:
        # reverse indexes elements
        indexes = [CustomIndex(fields=("is_superuser",)), Index(fields=("username", "is_active"))]


class Email(Model):
    email_id = fields.IntField(primary_key=True)
    email = fields.CharField(max_length=200, db_index=True)
    company = fields.CharField(max_length=100, db_index=True, unique=True)
    is_primary = fields.BooleanField(default=False)
    address = fields.CharField(max_length=200)
    users: fields.ManyToManyRelation[User] = fields.ManyToManyField("models.User")
    config: fields.OneToOneRelation[Config] = fields.OneToOneField("models.Config")


def default_name():
    # callable default for Category.name; returns a uuid.UUID object
    return uuid.uuid4()


class Category(Model):
    slug = fields.CharField(max_length=100)
    name = fields.CharField(max_length=200, null=True, default=default_name)
    owner: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
        "models.User", description="User"
    )
    title = fields.CharField(max_length=20, unique=False)
    created_at = fields.DatetimeField(auto_now_add=True)

    class Meta:
        # dialect-specific index type, chosen at import time from TEST_DB
        if Dialect.is_postgres():
            indexes = [HashIndex(fields=("slug",))]
        elif Dialect.is_mysql():
            indexes = [FullTextIndex(fields=("slug",))]  # type:ignore
        else:
            indexes = [Index(fields=("slug",))]  # type:ignore


class Product(Model):
    id = fields.BigIntField(primary_key=True)
    categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField(
        "models.Category", null=False
    )
    users: fields.ManyToManyRelation[User] = fields.ManyToManyField(
        "models.User", related_name="products"
    )
    name = fields.CharField(max_length=50)
    view_num = fields.IntField(description="View Num", default=0)
    sort = fields.IntField()
    is_reviewed = fields.BooleanField(description="Is Reviewed")
    type: int = fields.IntEnumField(
        ProductType, description="Product Type", source_field="type_db_alias"
    )
    pic = fields.CharField(max_length=200)
    body = fields.TextField()
    price = fields.FloatField(null=True)
    no = fields.UUIDField(db_index=True)
    created_at = fields.DatetimeField(auto_now_add=True)
    is_deleted = fields.BooleanField(default=False)

    class Meta:
        unique_together = (("name", "type"),)
        # tuple-of-field-name-tuples form of index declaration
        indexes = (("name", "type"),)
        managed = True


class Config(Model):
    slug = fields.CharField(primary_key=True, max_length=20)
    categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField(
        "models.Category", through="config_category_map", related_name="category_set"
    )
    label = fields.CharField(max_length=200)
    key = fields.CharField(max_length=20)
    value: dict = fields.JSONField()
    status: Status = fields.IntEnumField(Status)
    user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
        "models.User", description="User"
    )

    email: fields.OneToOneRelation[Email]

    class Meta:
        managed = True


class DontManageMe(Model):
    name = fields.CharField(max_length=50)

    class Meta:
        # managed = False — presumably excluded from aerich-generated
        # migrations; confirm against tests/test_migrate.py
        managed = False
class User(Model):
    # second-app variant of the User model (separate "second" connection)
    username = fields.CharField(max_length=20, unique=True)
    password = fields.CharField(max_length=200)
    last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
    is_active = fields.BooleanField(default=True, description="Is Active")
    is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
    avatar = fields.CharField(max_length=200, default="")
    intro = fields.TextField(default="")


class Email(Model):
    email = fields.CharField(max_length=200)
    is_primary = fields.BooleanField(default=False)
    # db_constraint=False: FK kept at ORM level without a database constraint
    user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
        "models_second.User", db_constraint=False
    )


class Category(Model):
    slug = fields.CharField(max_length=200)
    name = fields.CharField(max_length=200)
    user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
        "models_second.User", description="User"
    )
    created_at = fields.DatetimeField(auto_now_add=True)
class User(Model):
    # "old" schema snapshot diffed against tests/models.py User: username not
    # yet unique, avatar still present, longitude wider at (12, 9).
    username = fields.CharField(max_length=20)
    password = fields.CharField(max_length=200)
    last_login = fields.DatetimeField(description="Last Login", default=datetime.datetime.now)
    is_active = fields.BooleanField(default=True, description="Is Active")
    is_superuser = fields.BooleanField(default=False, description="Is SuperUser")
    avatar = fields.CharField(max_length=200, default="")
    intro = fields.TextField(default="")
    longitude = fields.DecimalField(max_digits=12, decimal_places=9)

    class Meta:
        indexes = [Index(fields=("username", "is_active")), CustomIndex(fields=("is_superuser",))]


class Email(Model):
    email = fields.CharField(max_length=200)
    # company is indexed but not yet unique (becomes unique in tests/models.py)
    company = fields.CharField(max_length=100, db_index=True)
    is_primary = fields.BooleanField(default=False)
    user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
        "models.User", db_constraint=False
    )


class Category(Model):
    slug = fields.CharField(max_length=200)
    name = fields.CharField(max_length=200)
    user: fields.ForeignKeyRelation[User] = fields.ForeignKeyField(
        "models.User", description="User"
    )
    # title is unique here; tests/models.py drops the unique constraint
    title = fields.CharField(max_length=20, unique=True)
    created_at = fields.DatetimeField(auto_now_add=True)

    class Meta:
        indexes = [Index(fields=("slug",))]


class Product(Model):
    # old counterpart of tests/models.py Product: fields later renamed
    # (is_review -> is_reviewed, image -> pic, is_delete -> is_deleted)
    # and uid later removed.
    categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField("models.Category")
    uid = fields.IntField(source_field="uuid", unique=True)
    name = fields.CharField(max_length=50)
    view_num = fields.IntField(description="View Num")
    sort = fields.IntField()
    is_review = fields.BooleanField(description="Is Reviewed")
    type: int = fields.IntEnumField(
        ProductType, description="Product Type", source_field="type_db_alias"
    )
    image = fields.CharField(max_length=200)
    body = fields.TextField()
    created_at = fields.DatetimeField(auto_now_add=True)
    is_delete = fields.BooleanField(default=False)


class Config(Model):
    slug = fields.CharField(primary_key=True, max_length=10)
    category: fields.ManyToManyRelation[Category] = fields.ManyToManyField("models.Category")
    categories: fields.ManyToManyRelation[Category] = fields.ManyToManyField(
        "models.Category", through="config_category_map", related_name="config_set"
    )
    name = fields.CharField(max_length=100, unique=True)
    label = fields.CharField(max_length=200)
    key = fields.CharField(max_length=20)
    value: dict = fields.JSONField()
    status: Status = fields.IntEnumField(Status, default=Status.on)

    class Meta:
        # explicit table name; tests/models.py Config uses the default
        table = "configs"
def main() -> None:
    """Regenerate ``old_models_describe.py`` from the current "models" app.

    Boots an in-memory sqlite Tortoise instance, dumps the describe-dict of
    the "models" app, and writes it as a Python assignment so the snapshot
    can be versioned and reformatted with ruff.
    """
    from pathlib import Path

    from tortoise import run_async
    from tortoise.contrib.test import init_memory_sqlite

    from aerich.utils import get_models_describe

    @init_memory_sqlite
    async def run() -> None:
        # NOTE: the variable name is serialized by the f"{... = }" form, so it
        # must stay `old_models_describe` to keep the generated file stable.
        old_models_describe = get_models_describe("models")
        target = Path("old_models_describe.py")
        target.write_text(f"{old_models_describe = }", encoding="utf-8")
        print(f"Write value to {target}\nYou can reformat it by `ruff format {target}`")

    run_async(run())
@pytest.fixture
def project_with_unapplied_migrations(new_project: Path) -> None:
    """Leave the copied project with a generated-but-unapplied migration.

    Appends an ``age`` field to models.py (idempotently, so repeated runs do
    not duplicate it) and runs ``aerich migrate`` without ``aerich upgrade``.
    """
    # FIX: this fixture was declared `async def` although it contains no
    # awaits and is consumed by a synchronous test; without an asyncio
    # fixture plugin active, pytest would inject a bare coroutine instead of
    # running the body. A plain sync fixture is always correct here.
    models_py = Path("models.py")
    text = models_py.read_text()
    if "age" not in text:
        models_py.write_text(text + " age=fields.IntField()\n")
    run_shell("aerich migrate", cwd=new_project)
async def test_command(mocker):
    """A fresh setup with no migration files has empty history and heads."""
    # An empty migrations directory simulates a project before any migrate.
    mocker.patch("os.listdir", return_value=[])
    async with Command(tortoise_orm) as command:
        assert await command.history() == []
        assert await command.heads() == []
def test_drop_table():
    """DROP TABLE quoting: backticks on MySQL, double quotes elsewhere."""
    sql = Migrate.ddl.drop_table(Category._meta.db_table)
    expected = (
        "DROP TABLE IF EXISTS `category`"
        if isinstance(Migrate.ddl, MysqlDDL)
        else 'DROP TABLE IF EXISTS "category"'
    )
    assert sql == expected
def test_modify_column():
    """MODIFY/ALTER COLUMN DDL for a nullable varchar and a bool column.

    Sqlite has no column-modify support, so it is skipped.
    """
    if isinstance(Migrate.ddl, SqliteDDL):
        return

    name_sql = Migrate.ddl.modify_column(
        Category, Category._meta.fields_map["name"].describe(False)
    )
    active_sql = Migrate.ddl.modify_column(
        User, User._meta.fields_map["is_active"].describe(False)
    )
    if isinstance(Migrate.ddl, MysqlDDL):
        assert name_sql == "ALTER TABLE `category` MODIFY COLUMN `name` VARCHAR(200)"
        assert active_sql == (
            "ALTER TABLE `user` MODIFY COLUMN `is_active` BOOL NOT NULL COMMENT 'Is Active' DEFAULT 1"
        )
    elif isinstance(Migrate.ddl, PostgresDDL):
        # Postgres changes the type via USING casts; comments/defaults are
        # handled by separate statements.
        assert name_sql == (
            'ALTER TABLE "category" ALTER COLUMN "name" TYPE VARCHAR(200) USING "name"::VARCHAR(200)'
        )
        assert active_sql == (
            'ALTER TABLE "user" ALTER COLUMN "is_active" TYPE BOOL USING "is_active"::BOOL'
        )
def test_drop_column():
    """DROP COLUMN statement quoting per dialect."""
    sql = Migrate.ddl.drop_column(Category, "name")
    if isinstance(Migrate.ddl, MysqlDDL):
        assert sql == "ALTER TABLE `category` DROP COLUMN `name`"
    elif isinstance(Migrate.ddl, PostgresDDL):
        assert sql == 'ALTER TABLE "category" DROP COLUMN "name"'
def test_drop_index():
    """DROP INDEX syntax (plain and unique) per dialect.

    MySQL drops indexes through ALTER TABLE and names the unique index after
    the column; other dialects use a standalone DROP INDEX IF EXISTS.
    """
    plain = Migrate.ddl.drop_index(Category, ["name"])
    unique = Migrate.ddl.drop_index(Category, ["name"], True)
    if isinstance(Migrate.ddl, MysqlDDL):
        expected = (
            "ALTER TABLE `category` DROP INDEX `idx_category_name_8b0cb9`",
            "ALTER TABLE `category` DROP INDEX `name`",
        )
    else:
        expected = (
            'DROP INDEX IF EXISTS "idx_category_name_8b0cb9"',
            'DROP INDEX IF EXISTS "uid_category_name_8b0cb9"',
        )
    assert (plain, unique) == expected
"ALTER TABLE `category` DROP FOREIGN KEY `fk_category_user_110d4c63`" 229 | elif isinstance(Migrate.ddl, PostgresDDL): 230 | assert ret == 'ALTER TABLE "category" DROP CONSTRAINT IF EXISTS "fk_category_user_110d4c63"' 231 | else: 232 | assert ret == 'ALTER TABLE "category" DROP FOREIGN KEY "fk_category_user_110d4c63"' 233 | -------------------------------------------------------------------------------- /tests/test_fake.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import re 5 | from pathlib import Path 6 | 7 | from tests._utils import Dialect, run_shell 8 | 9 | 10 | def _append_field(*files: str, name="field_1") -> None: 11 | for file in files: 12 | p = Path(file) 13 | field = f" {name} = fields.IntField(default=0)" 14 | with p.open("a") as f: 15 | f.write(os.linesep + field) 16 | 17 | 18 | def test_fake(new_aerich_project): 19 | if Dialect.is_sqlite(): 20 | # TODO: go ahead if sqlite alter-column supported 21 | return 22 | output = run_shell("aerich init -t settings.TORTOISE_ORM") 23 | assert "Success" in output 24 | output = run_shell("aerich init-db") 25 | assert "Success" in output 26 | output = run_shell("aerich --app models_second init-db") 27 | assert "Success" in output 28 | output = run_shell("pytest _tests.py::test_init_db") 29 | assert "error" not in output.lower() 30 | _append_field("models.py", "models_second.py") 31 | output = run_shell("aerich migrate") 32 | assert "Success" in output 33 | output = run_shell("aerich --app models_second migrate") 34 | assert "Success" in output 35 | output = run_shell("aerich upgrade --fake") 36 | assert "FAKED" in output 37 | output = run_shell("aerich --app models_second upgrade --fake") 38 | assert "FAKED" in output 39 | output = run_shell("pytest _tests.py::test_fake_field_1") 40 | assert "error" not in output.lower() 41 | _append_field("models.py", "models_second.py", name="field_2") 42 | output = 
run_shell("aerich migrate") 43 | assert "Success" in output 44 | output = run_shell("aerich --app models_second migrate") 45 | assert "Success" in output 46 | output = run_shell("aerich heads") 47 | assert "_update.py" in output 48 | output = run_shell("aerich upgrade --fake") 49 | assert "FAKED" in output 50 | output = run_shell("aerich --app models_second upgrade --fake") 51 | assert "FAKED" in output 52 | output = run_shell("pytest _tests.py::test_fake_field_2") 53 | assert "error" not in output.lower() 54 | output = run_shell("aerich heads") 55 | assert "No available heads." in output 56 | output = run_shell("aerich --app models_second heads") 57 | assert "No available heads." in output 58 | _append_field("models.py", "models_second.py", name="field_3") 59 | run_shell("aerich migrate", capture_output=False) 60 | run_shell("aerich --app models_second migrate", capture_output=False) 61 | run_shell("aerich upgrade --fake", capture_output=False) 62 | run_shell("aerich --app models_second upgrade --fake", capture_output=False) 63 | output = run_shell("aerich downgrade --fake -v 2 --yes", input="y\n") 64 | assert "FAKED" in output 65 | output = run_shell("aerich --app models_second downgrade --fake -v 2 --yes", input="y\n") 66 | assert "FAKED" in output 67 | output = run_shell("aerich heads") 68 | assert "No available heads." not in output 69 | assert not re.search(r"1_\d+_update\.py", output) 70 | assert re.search(r"2_\d+_update\.py", output) 71 | output = run_shell("aerich --app models_second heads") 72 | assert "No available heads." not in output 73 | assert not re.search(r"1_\d+_update\.py", output) 74 | assert re.search(r"2_\d+_update\.py", output) 75 | output = run_shell("aerich downgrade --fake -v 1 --yes", input="y\n") 76 | assert "FAKED" in output 77 | output = run_shell("aerich --app models_second downgrade --fake -v 1 --yes", input="y\n") 78 | assert "FAKED" in output 79 | output = run_shell("aerich heads") 80 | assert "No available heads." 
not in output 81 | assert re.search(r"1_\d+_update\.py", output) 82 | assert re.search(r"2_\d+_update\.py", output) 83 | output = run_shell("aerich --app models_second heads") 84 | assert "No available heads." not in output 85 | assert re.search(r"1_\d+_update\.py", output) 86 | assert re.search(r"2_\d+_update\.py", output) 87 | output = run_shell("aerich upgrade --fake") 88 | assert "FAKED" in output 89 | output = run_shell("aerich --app models_second upgrade --fake") 90 | assert "FAKED" in output 91 | output = run_shell("aerich heads") 92 | assert "No available heads." in output 93 | output = run_shell("aerich --app models_second heads") 94 | assert "No available heads." in output 95 | output = run_shell("aerich downgrade --fake -v 1 --yes", input="y\n") 96 | assert "FAKED" in output 97 | output = run_shell("aerich --app models_second downgrade --fake -v 1 --yes", input="y\n") 98 | assert "FAKED" in output 99 | output = run_shell("aerich heads") 100 | assert "No available heads." not in output 101 | assert re.search(r"1_\d+_update\.py", output) 102 | assert re.search(r"2_\d+_update\.py", output) 103 | output = run_shell("aerich --app models_second heads") 104 | assert "No available heads." 
not in output 105 | assert re.search(r"1_\d+_update\.py", output) 106 | assert re.search(r"2_\d+_update\.py", output) 107 | -------------------------------------------------------------------------------- /tests/test_inspectdb.py: -------------------------------------------------------------------------------- 1 | from tests._utils import Dialect, run_shell 2 | 3 | 4 | def test_inspect(new_aerich_project): 5 | if Dialect.is_sqlite(): 6 | # TODO: test sqlite after #384 fixed 7 | return 8 | run_shell("aerich init -t settings.TORTOISE_ORM") 9 | run_shell("aerich init-db") 10 | ret = run_shell("aerich inspectdb -t product") 11 | assert ret.startswith("from tortoise import Model, fields") 12 | assert "primary_key=True" in ret 13 | assert "fields.DatetimeField" in ret 14 | assert "fields.FloatField" in ret 15 | assert "fields.UUIDField" in ret 16 | if Dialect.is_mysql(): 17 | assert "db_index=True" in ret 18 | -------------------------------------------------------------------------------- /tests/test_python_m.py: -------------------------------------------------------------------------------- 1 | import subprocess # nosec 2 | from pathlib import Path 3 | 4 | from aerich.version import __version__ 5 | from tests._utils import chdir, run_shell 6 | 7 | 8 | def test_python_m_aerich(): 9 | assert __version__ in run_shell("python -m aerich --version") 10 | 11 | 12 | def test_poetry_add(tmp_path: Path): 13 | package = Path(__file__).parent.resolve().parent 14 | with chdir(tmp_path): 15 | subprocess.run(["poetry", "new", "foo"]) # nosec 16 | with chdir("foo"): 17 | r = subprocess.run(["poetry", "add", package]) # nosec 18 | assert r.returncode == 0 19 | -------------------------------------------------------------------------------- /tests/test_remove_unique_constraint.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import shutil 4 | from pathlib import Path 5 | 6 | from tests._utils import 
def _update_model(from_file: str) -> None:
    """Overwrite ./models.py with the named asset from assets/remove_constraint."""
    source = Path(__file__).parent / "assets" / "remove_constraint" / from_file
    shutil.copy(source, "models.py")
annotations 2 | 3 | import contextlib 4 | import os 5 | import platform 6 | import shlex 7 | import shutil 8 | import subprocess 9 | from collections.abc import Generator 10 | from contextlib import contextmanager 11 | from pathlib import Path 12 | 13 | from tests._utils import Dialect, chdir, copy_files 14 | 15 | 16 | def run_aerich(cmd: str) -> subprocess.CompletedProcess | None: 17 | if not cmd.startswith("poetry") and not cmd.startswith("python"): 18 | if not cmd.startswith("aerich"): 19 | cmd = "aerich " + cmd 20 | if platform.system() == "Windows": 21 | cmd = "python -m " + cmd 22 | r = None 23 | with contextlib.suppress(subprocess.TimeoutExpired): 24 | r = subprocess.run(shlex.split(cmd), timeout=2) 25 | return r 26 | 27 | 28 | def run_shell(cmd: str) -> subprocess.CompletedProcess: 29 | envs = dict(os.environ, PYTHONPATH=".") 30 | return subprocess.run(shlex.split(cmd), env=envs) 31 | 32 | 33 | def _get_empty_db() -> Path: 34 | if (db_file := Path("db.sqlite3")).exists(): 35 | db_file.unlink() 36 | return db_file 37 | 38 | 39 | @contextmanager 40 | def prepare_sqlite_project(tmp_path: Path) -> Generator[tuple[Path, str]]: 41 | test_dir = Path(__file__).parent 42 | asset_dir = test_dir / "assets" / "sqlite_migrate" 43 | with chdir(tmp_path): 44 | files = ("models.py", "settings.py", "_tests.py") 45 | copy_files(*(asset_dir / f for f in files), target_dir=Path()) 46 | models_py, settings_py, test_py = (Path(f) for f in files) 47 | copy_files(asset_dir / "conftest_.py", target_dir=Path("conftest.py")) 48 | _get_empty_db() 49 | yield models_py, models_py.read_text("utf-8") 50 | 51 | 52 | def test_close_tortoise_connections_patch(tmp_path: Path) -> None: 53 | if not Dialect.is_sqlite(): 54 | return 55 | with prepare_sqlite_project(tmp_path) as (models_py, models_text): 56 | run_aerich("aerich init -t settings.TORTOISE_ORM") 57 | r = run_aerich("aerich init-db") 58 | assert r is not None 59 | 60 | 61 | def test_sqlite_migrate_alter_indexed_unique(tmp_path: Path) 
-> None: 62 | if not Dialect.is_sqlite(): 63 | return 64 | with prepare_sqlite_project(tmp_path) as (models_py, models_text): 65 | models_py.write_text(models_text.replace("db_index=False", "db_index=True")) 66 | run_aerich("aerich init -t settings.TORTOISE_ORM") 67 | run_aerich("aerich init-db") 68 | r = run_shell("pytest -s _tests.py::test_allow_duplicate") 69 | assert r.returncode == 0 70 | models_py.write_text(models_text.replace("db_index=False", "unique=True")) 71 | run_aerich("aerich migrate") # migrations/models/1_ 72 | run_aerich("aerich upgrade") 73 | r = run_shell("pytest _tests.py::test_unique_is_true") 74 | assert r.returncode == 0 75 | models_py.write_text(models_text.replace("db_index=False", "db_index=True")) 76 | run_aerich("aerich migrate") # migrations/models/2_ 77 | run_aerich("aerich upgrade") 78 | r = run_shell("pytest -s _tests.py::test_allow_duplicate") 79 | assert r.returncode == 0 80 | 81 | 82 | M2M_WITH_CUSTOM_THROUGH = """ 83 | groups = fields.ManyToManyField("models.Group", through="foo_group") 84 | 85 | class Group(Model): 86 | name = fields.CharField(max_length=60) 87 | 88 | class FooGroup(Model): 89 | foo = fields.ForeignKeyField("models.Foo") 90 | group = fields.ForeignKeyField("models.Group") 91 | is_active = fields.BooleanField(default=False) 92 | 93 | class Meta: 94 | table = "foo_group" 95 | """ 96 | 97 | 98 | def test_sqlite_migrate(tmp_path: Path) -> None: 99 | if not Dialect.is_sqlite(): 100 | return 101 | with prepare_sqlite_project(tmp_path) as (models_py, models_text): 102 | MODELS = models_text 103 | run_aerich("aerich init -t settings.TORTOISE_ORM") 104 | config_file = Path("pyproject.toml") 105 | modify_time = config_file.stat().st_mtime 106 | run_aerich("aerich init-db") 107 | run_aerich("aerich init -t settings.TORTOISE_ORM") 108 | assert modify_time == config_file.stat().st_mtime 109 | r = run_shell("pytest _tests.py::test_allow_duplicate") 110 | assert r.returncode == 0 111 | # Add index 112 | 
models_py.write_text(MODELS.replace("index=False", "index=True")) 113 | run_aerich("aerich migrate") # migrations/models/1_ 114 | run_aerich("aerich upgrade") 115 | r = run_shell("pytest -s _tests.py::test_allow_duplicate") 116 | assert r.returncode == 0 117 | # Drop index 118 | models_py.write_text(MODELS) 119 | run_aerich("aerich migrate") # migrations/models/2_ 120 | run_aerich("aerich upgrade") 121 | r = run_shell("pytest -s _tests.py::test_allow_duplicate") 122 | assert r.returncode == 0 123 | # Add unique index 124 | models_py.write_text(MODELS.replace("index=False", "index=True, unique=True")) 125 | run_aerich("aerich migrate") # migrations/models/3_ 126 | run_aerich("aerich upgrade") 127 | r = run_shell("pytest _tests.py::test_unique_is_true") 128 | assert r.returncode == 0 129 | # Drop unique index 130 | models_py.write_text(MODELS) 131 | run_aerich("aerich migrate") # migrations/models/4_ 132 | run_aerich("aerich upgrade") 133 | r = run_shell("pytest _tests.py::test_allow_duplicate") 134 | assert r.returncode == 0 135 | # Add field with unique=True 136 | with models_py.open("a") as f: 137 | f.write(" age = fields.IntField(unique=True, default=0)") 138 | run_aerich("aerich migrate") # migrations/models/5_ 139 | run_aerich("aerich upgrade") 140 | r = run_shell("pytest _tests.py::test_add_unique_field") 141 | assert r.returncode == 0 142 | # Drop unique field 143 | models_py.write_text(MODELS) 144 | run_aerich("aerich migrate") # migrations/models/6_ 145 | run_aerich("aerich upgrade") 146 | r = run_shell("pytest -s _tests.py::test_drop_unique_field") 147 | assert r.returncode == 0 148 | 149 | # Initial with indexed field and then drop it 150 | migrations_dir = Path("migrations/models") 151 | shutil.rmtree(migrations_dir) 152 | db_file = _get_empty_db() 153 | models_py.write_text(MODELS + " age = fields.IntField(db_index=True)") 154 | run_aerich("aerich init -t settings.TORTOISE_ORM") 155 | run_aerich("aerich init-db") 156 | migration_file = 
list(migrations_dir.glob("0_*.py"))[0] 157 | assert "CREATE INDEX" in migration_file.read_text() 158 | r = run_shell("pytest _tests.py::test_with_age_field") 159 | assert r.returncode == 0 160 | models_py.write_text(MODELS) 161 | run_aerich("aerich migrate") 162 | run_aerich("aerich upgrade") 163 | migration_file_1 = list(migrations_dir.glob("1_*.py"))[0] 164 | assert "DROP INDEX" in migration_file_1.read_text() 165 | r = run_shell("pytest _tests.py::test_without_age_field") 166 | assert r.returncode == 0 167 | 168 | # Generate migration file in emptry directory 169 | db_file.unlink() 170 | run_aerich("aerich init-db") 171 | assert not db_file.exists() 172 | for p in migrations_dir.glob("*"): 173 | if p.is_dir(): 174 | shutil.rmtree(p) 175 | else: 176 | p.unlink() 177 | run_aerich("aerich init-db") 178 | assert db_file.exists() 179 | 180 | # init without '[tool]' section in pyproject.toml 181 | config_file = Path("pyproject.toml") 182 | config_file.write_text('[project]\nname = "project"') 183 | run_aerich("init -t settings.TORTOISE_ORM") 184 | assert "[tool.aerich]" in config_file.read_text() 185 | 186 | # add m2m with custom model for through 187 | models_py.write_text(MODELS + M2M_WITH_CUSTOM_THROUGH) 188 | run_aerich("aerich migrate") 189 | run_aerich("aerich upgrade") 190 | migration_file_1 = list(migrations_dir.glob("1_*.py"))[0] 191 | assert "foo_group" in migration_file_1.read_text() 192 | r = run_shell("pytest _tests.py::test_m2m_with_custom_through") 193 | assert r.returncode == 0 194 | 195 | # add m2m field after init-db 196 | new = """ 197 | groups = fields.ManyToManyField("models.Group", through="foo_group", related_name="users") 198 | 199 | class Group(Model): 200 | name = fields.CharField(max_length=60) 201 | """ 202 | _get_empty_db() 203 | if migrations_dir.exists(): 204 | shutil.rmtree(migrations_dir) 205 | models_py.write_text(MODELS) 206 | run_aerich("aerich init-db") 207 | models_py.write_text(MODELS + new) 208 | run_aerich("aerich migrate") 209 
| run_aerich("aerich upgrade") 210 | migration_file_1 = list(migrations_dir.glob("1_*.py"))[0] 211 | assert "foo_group" in migration_file_1.read_text() 212 | r = run_shell("pytest _tests.py::test_add_m2m_field_after_init_db") 213 | assert r.returncode == 0 214 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | from aerich.utils import get_dict_diff_by_key, import_py_file 2 | 3 | 4 | def test_import_py_file() -> None: 5 | m = import_py_file("aerich/utils.py") 6 | assert getattr(m, "import_py_file", None) 7 | 8 | 9 | class TestDiffFields: 10 | def test_the_same_through_order(self) -> None: 11 | old = [ 12 | {"name": "users", "through": "users_group"}, 13 | {"name": "admins", "through": "admins_group"}, 14 | ] 15 | new = [ 16 | {"name": "members", "through": "users_group"}, 17 | {"name": "admins", "through": "admins_group"}, 18 | ] 19 | diffs = list(get_dict_diff_by_key(old, new)) 20 | assert type(get_dict_diff_by_key(old, new)).__name__ == "generator" 21 | assert len(diffs) == 1 22 | assert diffs == [("change", [0, "name"], ("users", "members"))] 23 | 24 | def test_same_through_with_different_orders(self) -> None: 25 | old = [ 26 | {"name": "users", "through": "users_group"}, 27 | {"name": "admins", "through": "admins_group"}, 28 | ] 29 | new = [ 30 | {"name": "admins", "through": "admins_group"}, 31 | {"name": "members", "through": "users_group"}, 32 | ] 33 | diffs = list(get_dict_diff_by_key(old, new)) 34 | assert len(diffs) == 1 35 | assert diffs == [("change", [0, "name"], ("users", "members"))] 36 | 37 | def test_the_same_field_name_order(self) -> None: 38 | old = [ 39 | {"name": "users", "through": "users_group"}, 40 | {"name": "admins", "through": "admins_group"}, 41 | ] 42 | new = [ 43 | {"name": "users", "through": "user_groups"}, 44 | {"name": "admins", "through": "admin_groups"}, 45 | ] 46 | diffs = 
list(get_dict_diff_by_key(old, new)) 47 | assert len(diffs) == 4 48 | assert diffs == [ 49 | ("remove", "", [(0, {"name": "users", "through": "users_group"})]), 50 | ("remove", "", [(0, {"name": "admins", "through": "admins_group"})]), 51 | ("add", "", [(0, {"name": "users", "through": "user_groups"})]), 52 | ("add", "", [(0, {"name": "admins", "through": "admin_groups"})]), 53 | ] 54 | 55 | def test_same_field_name_with_different_orders(self) -> None: 56 | old = [ 57 | {"name": "admins", "through": "admins_group"}, 58 | {"name": "users", "through": "users_group"}, 59 | ] 60 | new = [ 61 | {"name": "users", "through": "user_groups"}, 62 | {"name": "admins", "through": "admin_groups"}, 63 | ] 64 | diffs = list(get_dict_diff_by_key(old, new)) 65 | assert len(diffs) == 4 66 | assert diffs == [ 67 | ("remove", "", [(0, {"name": "admins", "through": "admins_group"})]), 68 | ("remove", "", [(0, {"name": "users", "through": "users_group"})]), 69 | ("add", "", [(0, {"name": "users", "through": "user_groups"})]), 70 | ("add", "", [(0, {"name": "admins", "through": "admin_groups"})]), 71 | ] 72 | 73 | def test_drop_one(self) -> None: 74 | old = [ 75 | {"name": "users", "through": "users_group"}, 76 | {"name": "admins", "through": "admins_group"}, 77 | ] 78 | new = [ 79 | {"name": "admins", "through": "admins_group"}, 80 | ] 81 | diffs = list(get_dict_diff_by_key(old, new)) 82 | assert len(diffs) == 1 83 | assert diffs == [("remove", "", [(0, {"name": "users", "through": "users_group"})])] 84 | 85 | def test_add_one(self) -> None: 86 | old = [ 87 | {"name": "admins", "through": "admins_group"}, 88 | ] 89 | new = [ 90 | {"name": "users", "through": "users_group"}, 91 | {"name": "admins", "through": "admins_group"}, 92 | ] 93 | diffs = list(get_dict_diff_by_key(old, new)) 94 | assert len(diffs) == 1 95 | assert diffs == [("add", "", [(0, {"name": "users", "through": "users_group"})])] 96 | 97 | def test_drop_some(self) -> None: 98 | old = [ 99 | {"name": "users", "through": 
"users_group"}, 100 | {"name": "admins", "through": "admins_group"}, 101 | {"name": "staffs", "through": "staffs_group"}, 102 | ] 103 | new = [ 104 | {"name": "admins", "through": "admins_group"}, 105 | ] 106 | diffs = list(get_dict_diff_by_key(old, new)) 107 | assert len(diffs) == 2 108 | assert diffs == [ 109 | ("remove", "", [(0, {"name": "users", "through": "users_group"})]), 110 | ("remove", "", [(0, {"name": "staffs", "through": "staffs_group"})]), 111 | ] 112 | 113 | def test_add_some(self) -> None: 114 | old = [ 115 | {"name": "staffs", "through": "staffs_group"}, 116 | ] 117 | new = [ 118 | {"name": "users", "through": "users_group"}, 119 | {"name": "admins", "through": "admins_group"}, 120 | {"name": "staffs", "through": "staffs_group"}, 121 | ] 122 | diffs = list(get_dict_diff_by_key(old, new)) 123 | assert len(diffs) == 2 124 | assert diffs == [ 125 | ("add", "", [(0, {"name": "users", "through": "users_group"})]), 126 | ("add", "", [(0, {"name": "admins", "through": "admins_group"})]), 127 | ] 128 | 129 | def test_some_through_unchanged(self) -> None: 130 | old = [ 131 | {"name": "staffs", "through": "staffs_group"}, 132 | {"name": "admins", "through": "admins_group"}, 133 | ] 134 | new = [ 135 | {"name": "users", "through": "users_group"}, 136 | {"name": "admins_new", "through": "admins_group"}, 137 | {"name": "staffs_new", "through": "staffs_group"}, 138 | ] 139 | diffs = list(get_dict_diff_by_key(old, new)) 140 | assert len(diffs) == 3 141 | assert diffs == [ 142 | ("change", [0, "name"], ("staffs", "staffs_new")), 143 | ("change", [0, "name"], ("admins", "admins_new")), 144 | ("add", "", [(0, {"name": "users", "through": "users_group"})]), 145 | ] 146 | 147 | def test_some_unchanged_without_drop_or_add(self) -> None: 148 | old = [ 149 | {"name": "staffs", "through": "staffs_group"}, 150 | {"name": "admins", "through": "admins_group"}, 151 | {"name": "users", "through": "users_group"}, 152 | ] 153 | new = [ 154 | {"name": "users_new", "through": 
"users_group"}, 155 | {"name": "admins_new", "through": "admins_group"}, 156 | {"name": "staffs_new", "through": "staffs_group"}, 157 | ] 158 | diffs = list(get_dict_diff_by_key(old, new)) 159 | assert len(diffs) == 3 160 | assert diffs == [ 161 | ("change", [0, "name"], ("staffs", "staffs_new")), 162 | ("change", [0, "name"], ("admins", "admins_new")), 163 | ("change", [0, "name"], ("users", "users_new")), 164 | ] 165 | --------------------------------------------------------------------------------