├── .coveragerc ├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── config.yml │ └── use_case.md ├── SECURITY.md ├── pull_request_template.md └── workflows │ ├── run-on-pr.yaml │ └── run-test.yaml ├── .gitignore ├── .gitreview ├── .pre-commit-config.yaml ├── CHANGES ├── LICENSE ├── MANIFEST.in ├── README.rst ├── README.unittests.rst ├── alembic ├── __init__.py ├── __main__.py ├── autogenerate │ ├── __init__.py │ ├── api.py │ ├── compare.py │ ├── render.py │ └── rewriter.py ├── command.py ├── config.py ├── context.py ├── context.pyi ├── ddl │ ├── __init__.py │ ├── _autogen.py │ ├── base.py │ ├── impl.py │ ├── mssql.py │ ├── mysql.py │ ├── oracle.py │ ├── postgresql.py │ └── sqlite.py ├── environment.py ├── migration.py ├── op.py ├── op.pyi ├── operations │ ├── __init__.py │ ├── base.py │ ├── batch.py │ ├── ops.py │ ├── schemaobj.py │ └── toimpl.py ├── py.typed ├── runtime │ ├── __init__.py │ ├── environment.py │ └── migration.py ├── script │ ├── __init__.py │ ├── base.py │ ├── revision.py │ └── write_hooks.py ├── templates │ ├── async │ │ ├── README │ │ ├── alembic.ini.mako │ │ ├── env.py │ │ └── script.py.mako │ ├── generic │ │ ├── README │ │ ├── alembic.ini.mako │ │ ├── env.py │ │ └── script.py.mako │ ├── multidb │ │ ├── README │ │ ├── alembic.ini.mako │ │ ├── env.py │ │ └── script.py.mako │ └── pyproject │ │ ├── README │ │ ├── alembic.ini.mako │ │ ├── env.py │ │ ├── pyproject.toml.mako │ │ └── script.py.mako ├── testing │ ├── __init__.py │ ├── assertions.py │ ├── env.py │ ├── fixtures.py │ ├── plugin │ │ ├── __init__.py │ │ └── bootstrap.py │ ├── requirements.py │ ├── schemacompare.py │ ├── suite │ │ ├── __init__.py │ │ ├── _autogen_fixtures.py │ │ ├── test_autogen_comments.py │ │ ├── test_autogen_computed.py │ │ ├── test_autogen_diffs.py │ │ ├── test_autogen_fks.py │ │ ├── test_autogen_identity.py │ │ ├── test_environment.py │ │ └── test_op.py │ ├── util.py │ └── warnings.py └── util │ ├── __init__.py │ ├── compat.py │ ├── editor.py │ ├── 
exc.py │ ├── langhelpers.py │ ├── messaging.py │ ├── pyfiles.py │ └── sqla_compat.py ├── docs └── build │ ├── Makefile │ ├── _static │ ├── nature_override.css │ └── site_custom_css.css │ ├── _templates │ └── site_custom_sidebars.html │ ├── api │ ├── api_overview.png │ ├── autogenerate.rst │ ├── commands.rst │ ├── config.rst │ ├── ddl.rst │ ├── index.rst │ ├── operations.rst │ ├── overview.rst │ ├── runtime.rst │ └── script.rst │ ├── assets │ └── api_overview.graffle │ ├── autogenerate.rst │ ├── batch.rst │ ├── branches.rst │ ├── changelog.rst │ ├── conf.py │ ├── cookbook.rst │ ├── front.rst │ ├── index.rst │ ├── make.bat │ ├── naming.rst │ ├── offline.rst │ ├── ops.rst │ ├── requirements.txt │ ├── tutorial.rst │ └── unreleased │ └── README.txt ├── pyproject.toml ├── reap_dbs.py ├── setup.cfg ├── setup.py ├── tests ├── __init__.py ├── _large_map.py ├── conftest.py ├── requirements.py ├── test_autogen_composition.py ├── test_autogen_diffs.py ├── test_autogen_indexes.py ├── test_autogen_render.py ├── test_batch.py ├── test_bulk_insert.py ├── test_command.py ├── test_config.py ├── test_editor.py ├── test_environment.py ├── test_external_dialect.py ├── test_impl.py ├── test_messaging.py ├── test_mssql.py ├── test_mysql.py ├── test_offline_environment.py ├── test_op.py ├── test_op_naming_convention.py ├── test_oracle.py ├── test_post_write.py ├── test_postgresql.py ├── test_revision.py ├── test_script_consumption.py ├── test_script_production.py ├── test_sqlite.py ├── test_stubs.py ├── test_suite.py ├── test_version_table.py └── test_version_traversal.py ├── tools └── write_pyi.py └── tox.ini /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | include=alembic/* 3 | 4 | [report] 5 | omit=alembic/testing/* -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported 
funding model platforms 2 | 3 | github: alembic 4 | patreon: zzzeek 5 | tidelift: "pypi/SQLAlchemy" 6 | 7 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: requires triage 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | 12 | 13 | **Expected behavior** 14 | 15 | 16 | **To Reproduce** 17 | Please try to provide a [Minimal, Complete, and Verifiable](http://stackoverflow.com/help/mcve) example, with the migration script and/or the SQLAlchemy tables or models involved. 18 | See also [Reporting Bugs](https://www.sqlalchemy.org/participate.html#bugs) on the website. 19 | 20 | ```py 21 | # Insert code here 22 | ``` 23 | 24 | **Error** 25 | 26 | ``` 27 | # Copy error here. Please include the full stack trace. 28 | ``` 29 | 30 | **Versions.** 31 | - OS: 32 | - Python: 33 | - Alembic: 34 | - SQLAlchemy: 35 | - Database: 36 | - DBAPI: 37 | 38 | **Additional context** 39 | 40 | 41 | **Have a nice day!** 42 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: Usage Questions (GitHub Discussions) 4 | url: https://github.com/sqlalchemy/alembic/discussions/new?category=Usage-Questions 5 | about: Questions and Answers for Alembic Users 6 | - name: Live Chat on Gitter 7 | url: https://gitter.im/sqlalchemy/community 8 | about: Searchable Web-Based Chat 9 | - name: Ideas / Feature Proposal (GitHub Discussions) 10 | url: https://github.com/sqlalchemy/alembic/discussions/new?category=Ideas 11 | about: Use this for initial discussion for new features and suggestions 12 | - name: SQLAlchemy Community Guide 13 | url: 
https://www.sqlalchemy.org/support.html 14 | about: Start here for an overview of SQLAlchemy's support network and posting guidelines 15 | 16 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/use_case.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Request a new use case 3 | about: Support for new SQL syntaxes, database capabilities, DBAPIs and DBAPI features 4 | title: '' 5 | labels: requires triage,use case 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the use case** 11 | 12 | 13 | **Databases / Backends / Drivers targeted** 14 | 15 | 16 | 17 | **Example Use** 18 | 19 | 20 | 21 | **Additional context** 22 | 23 | 24 | **Have a nice day!** 25 | -------------------------------------------------------------------------------- /.github/SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Reporting a Vulnerability 4 | 5 | SQLAlchemy participates in the Tidelift security infrastructure for reporting 6 | potential vulnerabilities responsibly. Please follow the guidelines at: 7 | 8 | https://tidelift.com/docs/security 9 | 10 | in order to report a security issue. 11 | 12 | 13 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ### Description 4 | 5 | 6 | ### Checklist 7 | 10 | 11 | This pull request is: 12 | 13 | - [ ] A documentation / typographical error fix 14 | - Good to go, no issue or tests are needed 15 | - [ ] A short code fix 16 | - please include the issue number, and create an issue if none exists, which 17 | must include a complete example of the issue. one line code fixes without an 18 | issue and demonstration will not be accepted. 19 | - Please include: `Fixes: #` in the commit message 20 | - please include tests. 
one line code fixes without tests will not be accepted. 21 | - [ ] A new feature implementation 22 | - please include the issue number, and create an issue if none exists, which must 23 | include a complete example of how the feature would look. 24 | - Please include: `Fixes: #` in the commit message 25 | - please include tests. 26 | 27 | **Have a nice day!** 28 | -------------------------------------------------------------------------------- /.github/workflows/run-on-pr.yaml: -------------------------------------------------------------------------------- 1 | name: Run tests on a pr 2 | 3 | on: 4 | # run on pull request to main excluding changes that are only on doc or example folders 5 | pull_request: 6 | branches: 7 | - main 8 | paths-ignore: 9 | - "docs/**" 10 | 11 | env: 12 | # global env to all steps 13 | TOX_WORKERS: -n2 14 | 15 | permissions: 16 | contents: read 17 | 18 | jobs: 19 | run-test-amd64: 20 | name: ${{ matrix.python-version }}-${{ matrix.sqlalchemy }}-${{ matrix.os }} 21 | runs-on: ${{ matrix.os }} 22 | strategy: 23 | # run this job using this matrix, excluding some combinations below. 24 | matrix: 25 | os: 26 | - "ubuntu-22.04" 27 | python-version: 28 | - "3.12" 29 | sqlalchemy: 30 | - sqla14 31 | - sqla20 32 | - sqlamain 33 | # abort all jobs as soon as one fails 34 | fail-fast: true 35 | 36 | # steps to run in each job. 
Some are github actions, others run shell commands 37 | steps: 38 | - name: Checkout repo 39 | uses: actions/checkout@v4 40 | 41 | - name: Set up python 42 | uses: actions/setup-python@v4 43 | with: 44 | python-version: ${{ matrix.python-version }} 45 | architecture: ${{ matrix.architecture }} 46 | 47 | - name: Install dependencies 48 | run: | 49 | python -m pip install --upgrade pip 50 | pip install --upgrade tox setuptools 51 | pip list 52 | 53 | - name: Run tests 54 | run: tox -e py-${{ matrix.sqlalchemy }} 55 | 56 | run-pep484: 57 | name: pep484-${{ matrix.python-version }}-${{ matrix.os }} 58 | runs-on: ${{ matrix.os }} 59 | strategy: 60 | matrix: 61 | os: 62 | - "ubuntu-latest" 63 | python-version: 64 | - "3.12" 65 | - "3.13" 66 | 67 | fail-fast: false 68 | 69 | steps: 70 | - name: Checkout repo 71 | uses: actions/checkout@v4 72 | 73 | - name: Set up python 74 | uses: actions/setup-python@v4 75 | with: 76 | python-version: ${{ matrix.python-version }} 77 | architecture: ${{ matrix.architecture }} 78 | 79 | - name: Install dependencies 80 | run: | 81 | python -m pip install --upgrade pip 82 | pip install --upgrade tox setuptools 83 | pip list 84 | 85 | - name: Run pep484 86 | run: tox -e pep484 87 | -------------------------------------------------------------------------------- /.github/workflows/run-test.yaml: -------------------------------------------------------------------------------- 1 | name: Run tests 2 | 3 | on: 4 | # run on push in main or rel_* branches excluding changes that are only on doc or example folders 5 | push: 6 | branches: 7 | - main 8 | - "rel_*" 9 | # branches used to test the workflow 10 | - "workflow_test_*" 11 | paths-ignore: 12 | - "docs/**" 13 | 14 | env: 15 | # global env to all steps 16 | TOX_WORKERS: -n2 17 | 18 | permissions: 19 | contents: read 20 | 21 | jobs: 22 | run-test: 23 | name: ${{ matrix.python-version }}-${{ matrix.sqlalchemy }}-${{ matrix.os }} 24 | runs-on: ${{ matrix.os }} 25 | strategy: 26 | # run this job using 
this matrix, excluding some combinations below. 27 | matrix: 28 | os: 29 | - "ubuntu-22.04" 30 | - "windows-latest" 31 | - "macos-latest" 32 | python-version: 33 | - "3.9" 34 | - "3.10" 35 | - "3.11" 36 | - "3.12" 37 | - "3.13" 38 | sqlalchemy: 39 | - sqla14 40 | - sqla20 41 | - sqlamain 42 | exclude: 43 | # sqla14 does not support 3.13+ 44 | - sqlalchemy: sqla14 45 | python-version: "3.13" 46 | 47 | fail-fast: false 48 | 49 | # steps to run in each job. Some are github actions, others run shell commands 50 | steps: 51 | - name: Checkout repo 52 | uses: actions/checkout@v4 53 | 54 | - name: Set up python 55 | uses: actions/setup-python@v4 56 | with: 57 | python-version: ${{ matrix.python-version }} 58 | architecture: ${{ matrix.architecture }} 59 | 60 | - name: Install dependencies 61 | run: | 62 | python -m pip install --upgrade pip 63 | pip install --upgrade tox setuptools 64 | pip list 65 | 66 | - name: Run tests 67 | run: tox -e py-${{ matrix.sqlalchemy }} 68 | 69 | run-pep484: 70 | name: pep484-${{ matrix.python-version }}-${{ matrix.os }} 71 | runs-on: ${{ matrix.os }} 72 | strategy: 73 | matrix: 74 | os: 75 | - "ubuntu-latest" 76 | python-version: 77 | - "3.12" 78 | - "3.13" 79 | 80 | fail-fast: false 81 | 82 | steps: 83 | - name: Checkout repo 84 | uses: actions/checkout@v4 85 | 86 | - name: Set up python 87 | uses: actions/setup-python@v4 88 | with: 89 | python-version: ${{ matrix.python-version }} 90 | architecture: ${{ matrix.architecture }} 91 | 92 | - name: Install dependencies 93 | run: | 94 | python -m pip install --upgrade pip 95 | pip install --upgrade tox setuptools 96 | pip list 97 | 98 | - name: Run tox pep484 99 | run: tox -e pep484 100 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.pyo 3 | /build/ 4 | dist/ 5 | /docs/build/output/ 6 | *.orig 7 | alembic.ini 8 | .venv 9 | /venv/ 10 | *.egg-info 11 | 
.coverage 12 | coverage.xml 13 | .tox 14 | *.patch 15 | /scratch 16 | /scratch_test_* 17 | /test_schema.db 18 | /test.cfg 19 | .idea/ 20 | .vscode/ 21 | .pytest_cache/ 22 | /docs/build/_build/ 23 | /pysqlite_test_schema.db 24 | *.sqlite3 25 | .mypy_cache/ 26 | -------------------------------------------------------------------------------- /.gitreview: -------------------------------------------------------------------------------- 1 | [gerrit] 2 | host=gerrit.sqlalchemy.org 3 | project=sqlalchemy/alembic 4 | defaultbranch=main 5 | port=29418 6 | 7 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | repos: 4 | - repo: https://github.com/python/black 5 | rev: 25.1.0 6 | hooks: 7 | - id: black 8 | 9 | - repo: https://github.com/sqlalchemyorg/zimports 10 | rev: v0.6.0 11 | hooks: 12 | - id: zimports 13 | args: 14 | - --keep-unused-type-checking 15 | 16 | - repo: https://github.com/pycqa/flake8 17 | rev: 7.2.0 18 | hooks: 19 | - id: flake8 20 | additional_dependencies: 21 | - flake8-import-order 22 | - flake8-import-single==0.1.5 23 | - flake8-builtins 24 | - flake8-docstrings 25 | - flake8-rst-docstrings 26 | - pydocstyle<4.0.0 27 | - pygments 28 | 29 | 30 | 31 | 32 | -------------------------------------------------------------------------------- /CHANGES: -------------------------------------------------------------------------------- 1 | ===== 2 | MOVED 3 | ===== 4 | 5 | Please see: 6 | 7 | /docs/changelog.html 8 | 9 | /docs/build/changelog.rst 10 | 11 | or 12 | 13 | http://alembic.sqlalchemy.org/en/latest/changelog.html 14 | 15 | for the current CHANGES. 
16 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2009-2025 Michael Bayer. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of 4 | this software and associated documentation files (the "Software"), to deal in 5 | the Software without restriction, including without limitation the rights to 6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies 7 | of the Software, and to permit persons to whom the Software is furnished to do 8 | so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | SOFTWARE. 
20 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include docs *.html *.css *.txt *.js *.jpg *.png *.py Makefile *.rst *.sty 2 | recursive-include tests *.py *.dat *.pyi 3 | recursive-include alembic/templates *.mako README *.py *.pyi 4 | recursive-include alembic *.py *.pyi py.typed 5 | recursive-include tools *.py 6 | 7 | include README* LICENSE CHANGES* tox.ini 8 | 9 | prune docs/build/output 10 | 11 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Alembic is a database migrations tool written by the author 2 | of `SQLAlchemy `_. A migrations tool 3 | offers the following functionality: 4 | 5 | * Can emit ALTER statements to a database in order to change 6 | the structure of tables and other constructs 7 | * Provides a system whereby "migration scripts" may be constructed; 8 | each script indicates a particular series of steps that can "upgrade" a 9 | target database to a new version, and optionally a series of steps that can 10 | "downgrade" similarly, doing the same steps in reverse. 11 | * Allows the scripts to execute in some sequential manner. 12 | 13 | The goals of Alembic are: 14 | 15 | * Very open ended and transparent configuration and operation. A new 16 | Alembic environment is generated from a set of templates which is selected 17 | among a set of options when setup first occurs. The templates then deposit a 18 | series of scripts that define fully how database connectivity is established 19 | and how migration scripts are invoked; the migration scripts themselves are 20 | generated from a template within that series of scripts. 
The scripts can 21 | then be further customized to define exactly how databases will be 22 | interacted with and what structure new migration files should take. 23 | * Full support for transactional DDL. The default scripts ensure that all 24 | migrations occur within a transaction - for those databases which support 25 | this (Postgresql, Microsoft SQL Server), migrations can be tested with no 26 | need to manually undo changes upon failure. 27 | * Minimalist script construction. Basic operations like renaming 28 | tables/columns, adding/removing columns, changing column attributes can be 29 | performed through one line commands like alter_column(), rename_table(), 30 | add_constraint(). There is no need to recreate full SQLAlchemy Table 31 | structures for simple operations like these - the functions themselves 32 | generate minimalist schema structures behind the scenes to achieve the given 33 | DDL sequence. 34 | * "auto generation" of migrations. While real world migrations are far more 35 | complex than what can be automatically determined, Alembic can still 36 | eliminate the initial grunt work in generating new migration directives 37 | from an altered schema. The ``--autogenerate`` feature will inspect the 38 | current status of a database using SQLAlchemy's schema inspection 39 | capabilities, compare it to the current state of the database model as 40 | specified in Python, and generate a series of "candidate" migrations, 41 | rendering them into a new migration script as Python directives. The 42 | developer then edits the new file, adding additional directives and data 43 | migrations as needed, to produce a finished migration. Table and column 44 | level changes can be detected, with constraints and indexes to follow as 45 | well. 46 | * Full support for migrations generated as SQL scripts. 
Those of us who 47 | work in corporate environments know that direct access to DDL commands on a 48 | production database is a rare privilege, and DBAs want textual SQL scripts. 49 | Alembic's usage model and commands are oriented towards being able to run a 50 | series of migrations into a textual output file as easily as it runs them 51 | directly to a database. Care must be taken in this mode to not invoke other 52 | operations that rely upon in-memory SELECTs of rows - Alembic tries to 53 | provide helper constructs like bulk_insert() to help with data-oriented 54 | operations that are compatible with script-based DDL. 55 | * Non-linear, dependency-graph versioning. Scripts are given UUID 56 | identifiers similarly to a DVCS, and the linkage of one script to the next 57 | is achieved via human-editable markers within the scripts themselves. 58 | The structure of a set of migration files is considered as a 59 | directed-acyclic graph, meaning any migration file can be dependent 60 | on any other arbitrary set of migration files, or none at 61 | all. Through this open-ended system, migration files can be organized 62 | into branches, multiple roots, and mergepoints, without restriction. 63 | Commands are provided to produce new branches, roots, and merges of 64 | branches automatically. 65 | * Provide a library of ALTER constructs that can be used by any SQLAlchemy 66 | application. The DDL constructs build upon SQLAlchemy's own DDLElement base 67 | and can be used standalone by any application or script. 68 | * At long last, bring SQLite and its inability to ALTER things into the fold, 69 | but in such a way that SQLite's very special workflow needs are accommodated 70 | in an explicit way that makes the most of a bad situation, through the 71 | concept of a "batch" migration, where multiple changes to a table can 72 | be batched together to form a series of instructions for a single, subsequent 73 | "move-and-copy" workflow. 
You can even use "move-and-copy" workflow for 74 | other databases, if you want to recreate a table in the background 75 | on a busy system. 76 | 77 | Documentation and status of Alembic is at https://alembic.sqlalchemy.org/ 78 | 79 | The SQLAlchemy Project 80 | ====================== 81 | 82 | Alembic is part of the `SQLAlchemy Project `_ and 83 | adheres to the same standards and conventions as the core project. 84 | 85 | Development / Bug reporting / Pull requests 86 | ___________________________________________ 87 | 88 | Please refer to the 89 | `SQLAlchemy Community Guide `_ for 90 | guidelines on coding and participating in this project. 91 | 92 | Code of Conduct 93 | _______________ 94 | 95 | Above all, SQLAlchemy places great emphasis on polite, thoughtful, and 96 | constructive communication between users and developers. 97 | Please see our current Code of Conduct at 98 | `Code of Conduct `_. 99 | 100 | License 101 | ======= 102 | 103 | Alembic is distributed under the `MIT license 104 | `_. 105 | -------------------------------------------------------------------------------- /alembic/__init__.py: -------------------------------------------------------------------------------- 1 | from . import context 2 | from . 
import op 3 | 4 | __version__ = "1.16.2" 5 | -------------------------------------------------------------------------------- /alembic/__main__.py: -------------------------------------------------------------------------------- 1 | from .config import main 2 | 3 | if __name__ == "__main__": 4 | main(prog="alembic") 5 | -------------------------------------------------------------------------------- /alembic/autogenerate/__init__.py: -------------------------------------------------------------------------------- 1 | from .api import _render_migration_diffs as _render_migration_diffs 2 | from .api import compare_metadata as compare_metadata 3 | from .api import produce_migrations as produce_migrations 4 | from .api import render_python_code as render_python_code 5 | from .api import RevisionContext as RevisionContext 6 | from .compare import _produce_net_changes as _produce_net_changes 7 | from .compare import comparators as comparators 8 | from .render import render_op_text as render_op_text 9 | from .render import renderers as renderers 10 | from .rewriter import Rewriter as Rewriter 11 | -------------------------------------------------------------------------------- /alembic/context.py: -------------------------------------------------------------------------------- 1 | from .runtime.environment import EnvironmentContext 2 | 3 | # create proxy functions for 4 | # each method on the EnvironmentContext class. 5 | EnvironmentContext.create_module_class_proxy(globals(), locals()) 6 | -------------------------------------------------------------------------------- /alembic/ddl/__init__.py: -------------------------------------------------------------------------------- 1 | from . import mssql 2 | from . import mysql 3 | from . import oracle 4 | from . import postgresql 5 | from . 
import sqlite 6 | from .impl import DefaultImpl as DefaultImpl 7 | -------------------------------------------------------------------------------- /alembic/ddl/oracle.py: -------------------------------------------------------------------------------- 1 | # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls 2 | # mypy: no-warn-return-any, allow-any-generics 3 | 4 | from __future__ import annotations 5 | 6 | import re 7 | from typing import Any 8 | from typing import Optional 9 | from typing import TYPE_CHECKING 10 | 11 | from sqlalchemy.sql import sqltypes 12 | 13 | from .base import AddColumn 14 | from .base import alter_table 15 | from .base import ColumnComment 16 | from .base import ColumnDefault 17 | from .base import ColumnName 18 | from .base import ColumnNullable 19 | from .base import ColumnType 20 | from .base import format_column_name 21 | from .base import format_server_default 22 | from .base import format_table_name 23 | from .base import format_type 24 | from .base import IdentityColumnDefault 25 | from .base import RenameTable 26 | from .impl import DefaultImpl 27 | from ..util.sqla_compat import compiles 28 | 29 | if TYPE_CHECKING: 30 | from sqlalchemy.dialects.oracle.base import OracleDDLCompiler 31 | from sqlalchemy.engine.cursor import CursorResult 32 | from sqlalchemy.sql.schema import Column 33 | 34 | 35 | class OracleImpl(DefaultImpl): 36 | __dialect__ = "oracle" 37 | transactional_ddl = False 38 | batch_separator = "/" 39 | command_terminator = "" 40 | type_synonyms = DefaultImpl.type_synonyms + ( 41 | {"VARCHAR", "VARCHAR2"}, 42 | {"BIGINT", "INTEGER", "SMALLINT", "DECIMAL", "NUMERIC", "NUMBER"}, 43 | {"DOUBLE", "FLOAT", "DOUBLE_PRECISION"}, 44 | ) 45 | identity_attrs_ignore = () 46 | 47 | def __init__(self, *arg, **kw) -> None: 48 | super().__init__(*arg, **kw) 49 | self.batch_separator = self.context_opts.get( 50 | "oracle_batch_separator", self.batch_separator 51 | ) 52 | 53 | def _exec(self, construct: Any, *args, 
**kw) -> Optional[CursorResult]: 54 | result = super()._exec(construct, *args, **kw) 55 | if self.as_sql and self.batch_separator: 56 | self.static_output(self.batch_separator) 57 | return result 58 | 59 | def compare_server_default( 60 | self, 61 | inspector_column, 62 | metadata_column, 63 | rendered_metadata_default, 64 | rendered_inspector_default, 65 | ): 66 | if rendered_metadata_default is not None: 67 | rendered_metadata_default = re.sub( 68 | r"^\((.+)\)$", r"\1", rendered_metadata_default 69 | ) 70 | 71 | rendered_metadata_default = re.sub( 72 | r"^\"?'(.+)'\"?$", r"\1", rendered_metadata_default 73 | ) 74 | 75 | if rendered_inspector_default is not None: 76 | rendered_inspector_default = re.sub( 77 | r"^\((.+)\)$", r"\1", rendered_inspector_default 78 | ) 79 | 80 | rendered_inspector_default = re.sub( 81 | r"^\"?'(.+)'\"?$", r"\1", rendered_inspector_default 82 | ) 83 | 84 | rendered_inspector_default = rendered_inspector_default.strip() 85 | return rendered_inspector_default != rendered_metadata_default 86 | 87 | def emit_begin(self) -> None: 88 | self._exec("SET TRANSACTION READ WRITE") 89 | 90 | def emit_commit(self) -> None: 91 | self._exec("COMMIT") 92 | 93 | 94 | @compiles(AddColumn, "oracle") 95 | def visit_add_column( 96 | element: AddColumn, compiler: OracleDDLCompiler, **kw 97 | ) -> str: 98 | return "%s %s" % ( 99 | alter_table(compiler, element.table_name, element.schema), 100 | add_column(compiler, element.column, **kw), 101 | ) 102 | 103 | 104 | @compiles(ColumnNullable, "oracle") 105 | def visit_column_nullable( 106 | element: ColumnNullable, compiler: OracleDDLCompiler, **kw 107 | ) -> str: 108 | return "%s %s %s" % ( 109 | alter_table(compiler, element.table_name, element.schema), 110 | alter_column(compiler, element.column_name), 111 | "NULL" if element.nullable else "NOT NULL", 112 | ) 113 | 114 | 115 | @compiles(ColumnType, "oracle") 116 | def visit_column_type( 117 | element: ColumnType, compiler: OracleDDLCompiler, **kw 118 | ) -> 
str: 119 | return "%s %s %s" % ( 120 | alter_table(compiler, element.table_name, element.schema), 121 | alter_column(compiler, element.column_name), 122 | "%s" % format_type(compiler, element.type_), 123 | ) 124 | 125 | 126 | @compiles(ColumnName, "oracle") 127 | def visit_column_name( 128 | element: ColumnName, compiler: OracleDDLCompiler, **kw 129 | ) -> str: 130 | return "%s RENAME COLUMN %s TO %s" % ( 131 | alter_table(compiler, element.table_name, element.schema), 132 | format_column_name(compiler, element.column_name), 133 | format_column_name(compiler, element.newname), 134 | ) 135 | 136 | 137 | @compiles(ColumnDefault, "oracle") 138 | def visit_column_default( 139 | element: ColumnDefault, compiler: OracleDDLCompiler, **kw 140 | ) -> str: 141 | return "%s %s %s" % ( 142 | alter_table(compiler, element.table_name, element.schema), 143 | alter_column(compiler, element.column_name), 144 | ( 145 | "DEFAULT %s" % format_server_default(compiler, element.default) 146 | if element.default is not None 147 | else "DEFAULT NULL" 148 | ), 149 | ) 150 | 151 | 152 | @compiles(ColumnComment, "oracle") 153 | def visit_column_comment( 154 | element: ColumnComment, compiler: OracleDDLCompiler, **kw 155 | ) -> str: 156 | ddl = "COMMENT ON COLUMN {table_name}.{column_name} IS {comment}" 157 | 158 | comment = compiler.sql_compiler.render_literal_value( 159 | (element.comment if element.comment is not None else ""), 160 | sqltypes.String(), 161 | ) 162 | 163 | return ddl.format( 164 | table_name=element.table_name, 165 | column_name=element.column_name, 166 | comment=comment, 167 | ) 168 | 169 | 170 | @compiles(RenameTable, "oracle") 171 | def visit_rename_table( 172 | element: RenameTable, compiler: OracleDDLCompiler, **kw 173 | ) -> str: 174 | return "%s RENAME TO %s" % ( 175 | alter_table(compiler, element.table_name, element.schema), 176 | format_table_name(compiler, element.new_table_name, None), 177 | ) 178 | 179 | 180 | def alter_column(compiler: OracleDDLCompiler, name: 
str) -> str: 181 | return "MODIFY %s" % format_column_name(compiler, name) 182 | 183 | 184 | def add_column(compiler: OracleDDLCompiler, column: Column[Any], **kw) -> str: 185 | return "ADD %s" % compiler.get_column_specification(column, **kw) 186 | 187 | 188 | @compiles(IdentityColumnDefault, "oracle") 189 | def visit_identity_column( 190 | element: IdentityColumnDefault, compiler: OracleDDLCompiler, **kw 191 | ): 192 | text = "%s %s " % ( 193 | alter_table(compiler, element.table_name, element.schema), 194 | alter_column(compiler, element.column_name), 195 | ) 196 | if element.default is None: 197 | # drop identity 198 | text += "DROP IDENTITY" 199 | return text 200 | else: 201 | text += compiler.visit_identity_column(element.default) 202 | return text 203 | -------------------------------------------------------------------------------- /alembic/environment.py: -------------------------------------------------------------------------------- 1 | from .runtime.environment import * # noqa 2 | -------------------------------------------------------------------------------- /alembic/migration.py: -------------------------------------------------------------------------------- 1 | from .runtime.migration import * # noqa 2 | -------------------------------------------------------------------------------- /alembic/op.py: -------------------------------------------------------------------------------- 1 | from .operations.base import Operations 2 | 3 | # create proxy functions for 4 | # each method on the Operations class. 5 | Operations.create_module_class_proxy(globals(), locals()) 6 | -------------------------------------------------------------------------------- /alembic/operations/__init__.py: -------------------------------------------------------------------------------- 1 | from . 
import toimpl 2 | from .base import AbstractOperations 3 | from .base import BatchOperations 4 | from .base import Operations 5 | from .ops import MigrateOperation 6 | from .ops import MigrationScript 7 | 8 | 9 | __all__ = [ 10 | "AbstractOperations", 11 | "Operations", 12 | "BatchOperations", 13 | "MigrateOperation", 14 | "MigrationScript", 15 | ] 16 | -------------------------------------------------------------------------------- /alembic/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zzzeek/alembic/f0d65770c14dcaa19ec2c9270999bed2ef99a311/alembic/py.typed -------------------------------------------------------------------------------- /alembic/runtime/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zzzeek/alembic/f0d65770c14dcaa19ec2c9270999bed2ef99a311/alembic/runtime/__init__.py -------------------------------------------------------------------------------- /alembic/script/__init__.py: -------------------------------------------------------------------------------- 1 | from .base import Script 2 | from .base import ScriptDirectory 3 | 4 | __all__ = ["ScriptDirectory", "Script"] 5 | -------------------------------------------------------------------------------- /alembic/script/write_hooks.py: -------------------------------------------------------------------------------- 1 | # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls 2 | # mypy: no-warn-return-any, allow-any-generics 3 | 4 | from __future__ import annotations 5 | 6 | import os 7 | import shlex 8 | import subprocess 9 | import sys 10 | from typing import Any 11 | from typing import Callable 12 | from typing import Dict 13 | from typing import List 14 | from typing import Optional 15 | from typing import TYPE_CHECKING 16 | from typing import Union 17 | 18 | from .. 
import util
from ..util import compat
from ..util.pyfiles import _preserving_path_as_str

if TYPE_CHECKING:
    from ..config import PostWriteHookConfig

# Token in a hook's "options" string that is substituted with the path of
# the newly generated revision file.
REVISION_SCRIPT_TOKEN = "REVISION_SCRIPT_FILENAME"

# hook type name -> hook callable, populated via @register()
_registry: dict = {}


def register(name: str) -> Callable:
    """A function decorator that will register that function as a write hook.

    See the documentation linked below for an example.

    .. seealso::

        :ref:`post_write_hooks_custom`


    """

    def decorate(fn):
        _registry[name] = fn
        return fn

    return decorate


def _invoke(
    name: str,
    revision_path: Union[str, os.PathLike[str]],
    options: PostWriteHookConfig,
) -> Any:
    """Invokes the formatter registered for the given name.

    :param name: The name of a formatter in the registry
    :param revision_path: string path to the revision file
    :param options: A dict containing kwargs passed to the
     specified formatter.
    :raises: :class:`alembic.util.CommandError`
    """
    revision_path = _preserving_path_as_str(revision_path)
    try:
        hook = _registry[name]
    except KeyError as ke:
        raise util.CommandError(
            f"No formatter with name '{name}' registered"
        ) from ke
    else:
        return hook(revision_path, options)


def _run_hooks(
    path: Union[str, os.PathLike[str]], hooks: list[PostWriteHookConfig]
) -> None:
    """Invoke hooks for a generated revision."""

    for hook in hooks:
        name = hook["_hook_name"]
        try:
            type_ = hook["type"]
        except KeyError as ke:
            raise util.CommandError(
                f"Key '{name}.type' (or 'type' in toml) is required "
                f"for post write hook {name!r}"
            ) from ke
        else:
            with util.status(
                f"Running post write hook {name!r}", newline=True
            ):
                _invoke(type_, path, hook)


def _parse_cmdline_options(cmdline_options_str: str, path: str) -> List[str]:
    """Parse options from a string into a list.

    Also substitutes the revision script token with the actual filename of
    the revision script.

    If the revision script token doesn't occur in the options string, it is
    automatically prepended.
    """
    if REVISION_SCRIPT_TOKEN not in cmdline_options_str:
        cmdline_options_str = REVISION_SCRIPT_TOKEN + " " + cmdline_options_str
    cmdline_options_list = shlex.split(
        cmdline_options_str, posix=compat.is_posix
    )
    cmdline_options_list = [
        option.replace(REVISION_SCRIPT_TOKEN, path)
        for option in cmdline_options_list
    ]
    return cmdline_options_list


@register("console_scripts")
def console_scripts(
    path: str, options: dict, ignore_output: bool = False
) -> None:
    """Write hook that runs a Python ``console_scripts`` entrypoint.

    The entrypoint named by ``options["entrypoint"]`` is executed in a
    subprocess under the current interpreter, with the revision path
    substituted into its command-line options.

    :param path: path of the revision file to process
    :param options: hook configuration; must contain ``entrypoint``, may
     contain ``cwd`` and ``options``
    :param ignore_output: when True, discard the subprocess's stdout/stderr
    :raises: :class:`alembic.util.CommandError` if the ``entrypoint`` key
     is missing or no matching console_scripts entrypoint is installed
    """
    try:
        entrypoint_name = options["entrypoint"]
    except KeyError as ke:
        raise util.CommandError(
            f"Key {options['_hook_name']}.entrypoint is required for post "
            f"write hook {options['_hook_name']!r}"
        ) from ke
    for entry in compat.importlib_metadata_get("console_scripts"):
        if entry.name == entrypoint_name:
            impl: Any = entry
            break
    else:
        raise util.CommandError(
            f"Could not find entrypoint console_scripts.{entrypoint_name}"
        )
    cwd: Optional[str] = options.get("cwd", None)
    cmdline_options_str = options.get("options", "")
    cmdline_options_list = _parse_cmdline_options(cmdline_options_str, path)

    kw: Dict[str, Any] = {}
    if ignore_output:
        kw["stdout"] = kw["stderr"] = subprocess.DEVNULL

    # run the entrypoint in a subprocess rather than in-process, so the
    # hook cannot affect the running Alembic interpreter
    subprocess.run(
        [
            sys.executable,
            "-c",
            f"import {impl.module}; {impl.module}.{impl.attr}()",
        ]
        + cmdline_options_list,
        cwd=cwd,
        **kw,
    )


@register("exec")
def exec_(path: str, options: dict, ignore_output: bool = False) -> None:
    """Write hook that runs an arbitrary executable.

    ``options["executable"]`` names the program; the revision path is
    substituted into (or prepended to) its command-line options.

    :param path: path of the revision file to process
    :param options: hook configuration; must contain ``executable``, may
     contain ``cwd`` and ``options``
    :param ignore_output: when True, discard the subprocess's stdout/stderr
    :raises: :class:`alembic.util.CommandError` if the ``executable`` key
     is missing
    """
    try:
        executable = options["executable"]
    except KeyError as ke:
        raise util.CommandError(
            f"Key {options['_hook_name']}.executable is required for post "
            f"write hook {options['_hook_name']!r}"
        ) from ke
    cwd: Optional[str] = options.get("cwd", None)
    cmdline_options_str = options.get("options", "")
    cmdline_options_list = _parse_cmdline_options(cmdline_options_str, path)

    kw: Dict[str, Any] = {}
    if ignore_output:
        kw["stdout"] = kw["stderr"] = subprocess.DEVNULL

    subprocess.run(
        [
            executable,
            *cmdline_options_list,
        ],
        cwd=cwd,
        **kw,
    )
--------------------------------------------------------------------------------
/alembic/templates/async/README:
--------------------------------------------------------------------------------
Generic single-database configuration with an async dbapi.
--------------------------------------------------------------------------------
/alembic/templates/async/alembic.ini.mako:
--------------------------------------------------------------------------------
# A generic, single database configuration.

[alembic]
# path to migration scripts.
# this is typically a path given in POSIX (e.g. forward slashes)
# format, relative to the token %(here)s which refers to the location of this
# ini file
script_location = ${script_location}

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory. for multiple paths, the path separator
# is defined by "path_separator" below.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to /versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "path_separator"
# below.
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions

# path_separator; This indicates what character is used to split lists of file
# paths, including version_locations and prepend_sys_path within configparser
# files such as alembic.ini.
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
# to provide os-dependent path splitting.
#
# Note that in order to support legacy alembic.ini files, this default does NOT
# take place if path_separator is not present in alembic.ini. If this
# option is omitted entirely, fallback logic is as follows:
#
# 1. Parsing of the version_locations option falls back to using the legacy
# "version_path_separator" key, which if absent then falls back to the legacy
# behavior of splitting on spaces and/or commas.
# 2. Parsing of the prepend_sys_path option falls back to the legacy
# behavior of splitting on spaces, commas, or colons.
63 | # 64 | # Valid values for path_separator are: 65 | # 66 | # path_separator = : 67 | # path_separator = ; 68 | # path_separator = space 69 | # path_separator = newline 70 | # 71 | # Use os.pathsep. Default configuration used for new projects. 72 | path_separator = os 73 | 74 | 75 | # set to 'true' to search source files recursively 76 | # in each "version_locations" directory 77 | # new in Alembic version 1.10 78 | # recursive_version_locations = false 79 | 80 | # the output encoding used when revision files 81 | # are written from script.py.mako 82 | # output_encoding = utf-8 83 | 84 | # database URL. This is consumed by the user-maintained env.py script only. 85 | # other means of configuring database URLs may be customized within the env.py 86 | # file. 87 | sqlalchemy.url = driver://user:pass@localhost/dbname 88 | 89 | 90 | [post_write_hooks] 91 | # post_write_hooks defines scripts or Python functions that are run 92 | # on newly generated revision scripts. See the documentation for further 93 | # detail and examples 94 | 95 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 96 | # hooks = black 97 | # black.type = console_scripts 98 | # black.entrypoint = black 99 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 100 | 101 | # lint with attempts to fix using "ruff" - use the exec runner, execute a binary 102 | # hooks = ruff 103 | # ruff.type = exec 104 | # ruff.executable = %(here)s/.venv/bin/ruff 105 | # ruff.options = check --fix REVISION_SCRIPT_FILENAME 106 | 107 | # Logging configuration. This is also consumed by the user-maintained 108 | # env.py script only. 
109 | [loggers] 110 | keys = root,sqlalchemy,alembic 111 | 112 | [handlers] 113 | keys = console 114 | 115 | [formatters] 116 | keys = generic 117 | 118 | [logger_root] 119 | level = WARNING 120 | handlers = console 121 | qualname = 122 | 123 | [logger_sqlalchemy] 124 | level = WARNING 125 | handlers = 126 | qualname = sqlalchemy.engine 127 | 128 | [logger_alembic] 129 | level = INFO 130 | handlers = 131 | qualname = alembic 132 | 133 | [handler_console] 134 | class = StreamHandler 135 | args = (sys.stderr,) 136 | level = NOTSET 137 | formatter = generic 138 | 139 | [formatter_generic] 140 | format = %(levelname)-5.5s [%(name)s] %(message)s 141 | datefmt = %H:%M:%S 142 | -------------------------------------------------------------------------------- /alembic/templates/async/env.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from logging.config import fileConfig 3 | 4 | from sqlalchemy import pool 5 | from sqlalchemy.engine import Connection 6 | from sqlalchemy.ext.asyncio import async_engine_from_config 7 | 8 | from alembic import context 9 | 10 | # this is the Alembic Config object, which provides 11 | # access to the values within the .ini file in use. 12 | config = context.config 13 | 14 | # Interpret the config file for Python logging. 15 | # This line sets up loggers basically. 16 | if config.config_file_name is not None: 17 | fileConfig(config.config_file_name) 18 | 19 | # add your model's MetaData object here 20 | # for 'autogenerate' support 21 | # from myapp import mymodel 22 | # target_metadata = mymodel.Base.metadata 23 | target_metadata = None 24 | 25 | # other values from the config, defined by the needs of env.py, 26 | # can be acquired: 27 | # my_important_option = config.get_main_option("my_important_option") 28 | # ... etc. 29 | 30 | 31 | def run_migrations_offline() -> None: 32 | """Run migrations in 'offline' mode. 
33 | 34 | This configures the context with just a URL 35 | and not an Engine, though an Engine is acceptable 36 | here as well. By skipping the Engine creation 37 | we don't even need a DBAPI to be available. 38 | 39 | Calls to context.execute() here emit the given string to the 40 | script output. 41 | 42 | """ 43 | url = config.get_main_option("sqlalchemy.url") 44 | context.configure( 45 | url=url, 46 | target_metadata=target_metadata, 47 | literal_binds=True, 48 | dialect_opts={"paramstyle": "named"}, 49 | ) 50 | 51 | with context.begin_transaction(): 52 | context.run_migrations() 53 | 54 | 55 | def do_run_migrations(connection: Connection) -> None: 56 | context.configure(connection=connection, target_metadata=target_metadata) 57 | 58 | with context.begin_transaction(): 59 | context.run_migrations() 60 | 61 | 62 | async def run_async_migrations() -> None: 63 | """In this scenario we need to create an Engine 64 | and associate a connection with the context. 65 | 66 | """ 67 | 68 | connectable = async_engine_from_config( 69 | config.get_section(config.config_ini_section, {}), 70 | prefix="sqlalchemy.", 71 | poolclass=pool.NullPool, 72 | ) 73 | 74 | async with connectable.connect() as connection: 75 | await connection.run_sync(do_run_migrations) 76 | 77 | await connectable.dispose() 78 | 79 | 80 | def run_migrations_online() -> None: 81 | """Run migrations in 'online' mode.""" 82 | 83 | asyncio.run(run_async_migrations()) 84 | 85 | 86 | if context.is_offline_mode(): 87 | run_migrations_offline() 88 | else: 89 | run_migrations_online() 90 | -------------------------------------------------------------------------------- /alembic/templates/async/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import 
sqlalchemy as sa 12 | ${imports if imports else ""} 13 | 14 | # revision identifiers, used by Alembic. 15 | revision: str = ${repr(up_revision)} 16 | down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} 17 | branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} 18 | depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} 19 | 20 | 21 | def upgrade() -> None: 22 | """Upgrade schema.""" 23 | ${upgrades if upgrades else "pass"} 24 | 25 | 26 | def downgrade() -> None: 27 | """Downgrade schema.""" 28 | ${downgrades if downgrades else "pass"} 29 | -------------------------------------------------------------------------------- /alembic/templates/generic/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration. -------------------------------------------------------------------------------- /alembic/templates/generic/alembic.ini.mako: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts. 5 | # this is typically a path given in POSIX (e.g. forward slashes) 6 | # format, relative to the token %(here)s which refers to the location of this 7 | # ini file 8 | script_location = ${script_location} 9 | 10 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s 11 | # Uncomment the line below if you want the files to be prepended with date and time 12 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file 13 | # for all available tokens 14 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s 15 | 16 | # sys.path path, will be prepended to sys.path if present. 17 | # defaults to the current working directory. for multiple paths, the path separator 18 | # is defined by "path_separator" below. 19 | prepend_sys_path = . 


# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to /versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "path_separator"
# below.
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions

# path_separator; This indicates what character is used to split lists of file
# paths, including version_locations and prepend_sys_path within configparser
# files such as alembic.ini.
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
# to provide os-dependent path splitting.
#
# Note that in order to support legacy alembic.ini files, this default does NOT
# take place if path_separator is not present in alembic.ini. If this
# option is omitted entirely, fallback logic is as follows:
#
# 1. Parsing of the version_locations option falls back to using the legacy
# "version_path_separator" key, which if absent then falls back to the legacy
# behavior of splitting on spaces and/or commas.
# 2.
Parsing of the prepend_sys_path option falls back to the legacy 63 | # behavior of splitting on spaces, commas, or colons. 64 | # 65 | # Valid values for path_separator are: 66 | # 67 | # path_separator = : 68 | # path_separator = ; 69 | # path_separator = space 70 | # path_separator = newline 71 | # 72 | # Use os.pathsep. Default configuration used for new projects. 73 | path_separator = os 74 | 75 | # set to 'true' to search source files recursively 76 | # in each "version_locations" directory 77 | # new in Alembic version 1.10 78 | # recursive_version_locations = false 79 | 80 | # the output encoding used when revision files 81 | # are written from script.py.mako 82 | # output_encoding = utf-8 83 | 84 | # database URL. This is consumed by the user-maintained env.py script only. 85 | # other means of configuring database URLs may be customized within the env.py 86 | # file. 87 | sqlalchemy.url = driver://user:pass@localhost/dbname 88 | 89 | 90 | [post_write_hooks] 91 | # post_write_hooks defines scripts or Python functions that are run 92 | # on newly generated revision scripts. See the documentation for further 93 | # detail and examples 94 | 95 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 96 | # hooks = black 97 | # black.type = console_scripts 98 | # black.entrypoint = black 99 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 100 | 101 | # lint with attempts to fix using "ruff" - use the exec runner, execute a binary 102 | # hooks = ruff 103 | # ruff.type = exec 104 | # ruff.executable = %(here)s/.venv/bin/ruff 105 | # ruff.options = check --fix REVISION_SCRIPT_FILENAME 106 | 107 | # Logging configuration. This is also consumed by the user-maintained 108 | # env.py script only. 
109 | [loggers] 110 | keys = root,sqlalchemy,alembic 111 | 112 | [handlers] 113 | keys = console 114 | 115 | [formatters] 116 | keys = generic 117 | 118 | [logger_root] 119 | level = WARNING 120 | handlers = console 121 | qualname = 122 | 123 | [logger_sqlalchemy] 124 | level = WARNING 125 | handlers = 126 | qualname = sqlalchemy.engine 127 | 128 | [logger_alembic] 129 | level = INFO 130 | handlers = 131 | qualname = alembic 132 | 133 | [handler_console] 134 | class = StreamHandler 135 | args = (sys.stderr,) 136 | level = NOTSET 137 | formatter = generic 138 | 139 | [formatter_generic] 140 | format = %(levelname)-5.5s [%(name)s] %(message)s 141 | datefmt = %H:%M:%S 142 | -------------------------------------------------------------------------------- /alembic/templates/generic/env.py: -------------------------------------------------------------------------------- 1 | from logging.config import fileConfig 2 | 3 | from sqlalchemy import engine_from_config 4 | from sqlalchemy import pool 5 | 6 | from alembic import context 7 | 8 | # this is the Alembic Config object, which provides 9 | # access to the values within the .ini file in use. 10 | config = context.config 11 | 12 | # Interpret the config file for Python logging. 13 | # This line sets up loggers basically. 14 | if config.config_file_name is not None: 15 | fileConfig(config.config_file_name) 16 | 17 | # add your model's MetaData object here 18 | # for 'autogenerate' support 19 | # from myapp import mymodel 20 | # target_metadata = mymodel.Base.metadata 21 | target_metadata = None 22 | 23 | # other values from the config, defined by the needs of env.py, 24 | # can be acquired: 25 | # my_important_option = config.get_main_option("my_important_option") 26 | # ... etc. 27 | 28 | 29 | def run_migrations_offline() -> None: 30 | """Run migrations in 'offline' mode. 31 | 32 | This configures the context with just a URL 33 | and not an Engine, though an Engine is acceptable 34 | here as well. 
By skipping the Engine creation 35 | we don't even need a DBAPI to be available. 36 | 37 | Calls to context.execute() here emit the given string to the 38 | script output. 39 | 40 | """ 41 | url = config.get_main_option("sqlalchemy.url") 42 | context.configure( 43 | url=url, 44 | target_metadata=target_metadata, 45 | literal_binds=True, 46 | dialect_opts={"paramstyle": "named"}, 47 | ) 48 | 49 | with context.begin_transaction(): 50 | context.run_migrations() 51 | 52 | 53 | def run_migrations_online() -> None: 54 | """Run migrations in 'online' mode. 55 | 56 | In this scenario we need to create an Engine 57 | and associate a connection with the context. 58 | 59 | """ 60 | connectable = engine_from_config( 61 | config.get_section(config.config_ini_section, {}), 62 | prefix="sqlalchemy.", 63 | poolclass=pool.NullPool, 64 | ) 65 | 66 | with connectable.connect() as connection: 67 | context.configure( 68 | connection=connection, target_metadata=target_metadata 69 | ) 70 | 71 | with context.begin_transaction(): 72 | context.run_migrations() 73 | 74 | 75 | if context.is_offline_mode(): 76 | run_migrations_offline() 77 | else: 78 | run_migrations_online() 79 | -------------------------------------------------------------------------------- /alembic/templates/generic/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | ${imports if imports else ""} 13 | 14 | # revision identifiers, used by Alembic. 
15 | revision: str = ${repr(up_revision)} 16 | down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} 17 | branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} 18 | depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} 19 | 20 | 21 | def upgrade() -> None: 22 | """Upgrade schema.""" 23 | ${upgrades if upgrades else "pass"} 24 | 25 | 26 | def downgrade() -> None: 27 | """Downgrade schema.""" 28 | ${downgrades if downgrades else "pass"} 29 | -------------------------------------------------------------------------------- /alembic/templates/multidb/README: -------------------------------------------------------------------------------- 1 | Rudimentary multi-database configuration. 2 | 3 | Multi-DB isn't vastly different from generic. The primary difference is that it 4 | will run the migrations N times (depending on how many databases you have 5 | configured), providing one engine name and associated context for each run. 6 | 7 | That engine name will then allow the migration to restrict what runs within it to 8 | just the appropriate migrations for that engine. You can see this behavior within 9 | the mako template. 10 | 11 | In the provided configuration, you'll need to have `databases` provided in 12 | alembic's config, and an `sqlalchemy.url` provided for each engine name. 13 | -------------------------------------------------------------------------------- /alembic/templates/multidb/alembic.ini.mako: -------------------------------------------------------------------------------- 1 | # a multi-database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts. 5 | # this is typically a path given in POSIX (e.g. 
forward slashes)
# format, relative to the token %(here)s which refers to the location of this
# ini file
script_location = ${script_location}

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory. for multiple paths, the path separator
# is defined by "path_separator" below.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to /versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "path_separator"
# below.
46 | # version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions 47 | 48 | # path_separator; This indicates what character is used to split lists of file 49 | # paths, including version_locations and prepend_sys_path within configparser 50 | # files such as alembic.ini. 51 | # The default rendered in new alembic.ini files is "os", which uses os.pathsep 52 | # to provide os-dependent path splitting. 53 | # 54 | # Note that in order to support legacy alembic.ini files, this default does NOT 55 | # take place if path_separator is not present in alembic.ini. If this 56 | # option is omitted entirely, fallback logic is as follows: 57 | # 58 | # 1. Parsing of the version_locations option falls back to using the legacy 59 | # "version_path_separator" key, which if absent then falls back to the legacy 60 | # behavior of splitting on spaces and/or commas. 61 | # 2. Parsing of the prepend_sys_path option falls back to the legacy 62 | # behavior of splitting on spaces, commas, or colons. 63 | # 64 | # Valid values for path_separator are: 65 | # 66 | # path_separator = : 67 | # path_separator = ; 68 | # path_separator = space 69 | # path_separator = newline 70 | # 71 | # Use os.pathsep. Default configuration used for new projects. 72 | path_separator = os 73 | 74 | # set to 'true' to search source files recursively 75 | # in each "version_locations" directory 76 | # new in Alembic version 1.10 77 | # recursive_version_locations = false 78 | 79 | # the output encoding used when revision files 80 | # are written from script.py.mako 81 | # output_encoding = utf-8 82 | 83 | # for multiple database configuration, new named sections are added 84 | # which each include a distinct ``sqlalchemy.url`` entry. A custom value 85 | # ``databases`` is added which indicates a listing of the per-database sections. 
86 | # The ``databases`` entry as well as the URLs present in the ``[engine1]`` 87 | # and ``[engine2]`` sections continue to be consumed by the user-maintained env.py 88 | # script only. 89 | 90 | databases = engine1, engine2 91 | 92 | [engine1] 93 | sqlalchemy.url = driver://user:pass@localhost/dbname 94 | 95 | [engine2] 96 | sqlalchemy.url = driver://user:pass@localhost/dbname2 97 | 98 | [post_write_hooks] 99 | # post_write_hooks defines scripts or Python functions that are run 100 | # on newly generated revision scripts. See the documentation for further 101 | # detail and examples 102 | 103 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 104 | # hooks = black 105 | # black.type = console_scripts 106 | # black.entrypoint = black 107 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 108 | 109 | # lint with attempts to fix using "ruff" - use the exec runner, execute a binary 110 | # hooks = ruff 111 | # ruff.type = exec 112 | # ruff.executable = %(here)s/.venv/bin/ruff 113 | # ruff.options = check --fix REVISION_SCRIPT_FILENAME 114 | 115 | # Logging configuration. This is also consumed by the user-maintained 116 | # env.py script only. 
117 | [loggers] 118 | keys = root,sqlalchemy,alembic 119 | 120 | [handlers] 121 | keys = console 122 | 123 | [formatters] 124 | keys = generic 125 | 126 | [logger_root] 127 | level = WARNING 128 | handlers = console 129 | qualname = 130 | 131 | [logger_sqlalchemy] 132 | level = WARNING 133 | handlers = 134 | qualname = sqlalchemy.engine 135 | 136 | [logger_alembic] 137 | level = INFO 138 | handlers = 139 | qualname = alembic 140 | 141 | [handler_console] 142 | class = StreamHandler 143 | args = (sys.stderr,) 144 | level = NOTSET 145 | formatter = generic 146 | 147 | [formatter_generic] 148 | format = %(levelname)-5.5s [%(name)s] %(message)s 149 | datefmt = %H:%M:%S 150 | -------------------------------------------------------------------------------- /alembic/templates/multidb/env.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from logging.config import fileConfig 3 | import re 4 | 5 | from sqlalchemy import engine_from_config 6 | from sqlalchemy import pool 7 | 8 | from alembic import context 9 | 10 | USE_TWOPHASE = False 11 | 12 | # this is the Alembic Config object, which provides 13 | # access to the values within the .ini file in use. 14 | config = context.config 15 | 16 | # Interpret the config file for Python logging. 17 | # This line sets up loggers basically. 18 | if config.config_file_name is not None: 19 | fileConfig(config.config_file_name) 20 | logger = logging.getLogger("alembic.env") 21 | 22 | # gather section names referring to different 23 | # databases. These are named "engine1", "engine2" 24 | # in the sample .ini file. 25 | db_names = config.get_main_option("databases", "") 26 | 27 | # add your model's MetaData objects here 28 | # for 'autogenerate' support. These must be set 29 | # up to hold just those tables targeting a 30 | # particular database. table.tometadata() may be 31 | # helpful here in case a "copy" of 32 | # a MetaData is needed. 
# from myapp import mymodel
# target_metadata = {
#     'engine1':mymodel.metadata1,
#     'engine2':mymodel.metadata2
# }
target_metadata = {}

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    # for the --sql use case, run migrations for each URL into
    # individual files.

    engines = {}
    for name in re.split(r",\s*", db_names):
        engines[name] = rec = {}
        # each database's URL lives in its own [<name>] ini section
        rec["url"] = context.config.get_section_option(name, "sqlalchemy.url")

    for name, rec in engines.items():
        logger.info("Migrating database %s" % name)
        # one .sql output file per database name
        file_ = "%s.sql" % name
        logger.info("Writing output to %s" % file_)
        with open(file_, "w") as buffer:
            context.configure(
                url=rec["url"],
                output_buffer=buffer,
                target_metadata=target_metadata.get(name),
                literal_binds=True,
                dialect_opts={"paramstyle": "named"},
            )
            with context.begin_transaction():
                context.run_migrations(engine_name=name)


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    # for the direct-to-DB use case, start a transaction on all
    # engines, then run all migrations, then commit all transactions.

    engines = {}
    for name in re.split(r",\s*", db_names):
        engines[name] = rec = {}
        rec["engine"] = engine_from_config(
            context.config.get_section(name, {}),
            prefix="sqlalchemy.",
            poolclass=pool.NullPool,
        )

    for name, rec in engines.items():
        engine = rec["engine"]
        rec["connection"] = conn = engine.connect()

        # two-phase commit keeps the databases consistent with each other
        # when the backend supports it (see USE_TWOPHASE above)
        if USE_TWOPHASE:
            rec["transaction"] = conn.begin_twophase()
        else:
            rec["transaction"] = conn.begin()

    try:
        for name, rec in engines.items():
            logger.info("Migrating database %s" % name)
            context.configure(
                connection=rec["connection"],
                upgrade_token="%s_upgrades" % name,
                downgrade_token="%s_downgrades" % name,
                target_metadata=target_metadata.get(name),
            )
            context.run_migrations(engine_name=name)

        # two-phase: prepare all transactions first, then commit all, so
        # a failure during prepare() still rolls every database back
        if USE_TWOPHASE:
            for rec in engines.values():
                rec["transaction"].prepare()

        for rec in engines.values():
            rec["transaction"].commit()
    except:
        # bare except is deliberate: this handler re-raises, and also
        # rolls back on BaseException subclasses such as KeyboardInterrupt
        for rec in engines.values():
            rec["transaction"].rollback()
        raise
    finally:
        # connections are closed regardless of commit/rollback outcome
        for rec in engines.values():
            rec["connection"].close()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
18 | revision: str = ${repr(up_revision)} 19 | down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} 20 | branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} 21 | depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} 22 | 23 | 24 | def upgrade(engine_name: str) -> None: 25 | """Upgrade schema.""" 26 | globals()["upgrade_%s" % engine_name]() 27 | 28 | 29 | def downgrade(engine_name: str) -> None: 30 | """Downgrade schema.""" 31 | globals()["downgrade_%s" % engine_name]() 32 | 33 | <% 34 | db_names = config.get_main_option("databases") 35 | %> 36 | 37 | ## generate an "upgrade_() / downgrade_()" function 38 | ## for each database name in the ini file. 39 | 40 | % for db_name in re.split(r',\s*', db_names): 41 | 42 | def upgrade_${db_name}() -> None: 43 | """Upgrade ${db_name} schema.""" 44 | ${context.get("%s_upgrades" % db_name, "pass")} 45 | 46 | 47 | def downgrade_${db_name}() -> None: 48 | """Downgrade ${db_name} schema.""" 49 | ${context.get("%s_downgrades" % db_name, "pass")} 50 | 51 | % endfor 52 | -------------------------------------------------------------------------------- /alembic/templates/pyproject/README: -------------------------------------------------------------------------------- 1 | pyproject configuration, based on the generic configuration. -------------------------------------------------------------------------------- /alembic/templates/pyproject/alembic.ini.mako: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | 5 | # database URL. This is consumed by the user-maintained env.py script only. 6 | # other means of configuring database URLs may be customized within the env.py 7 | # file. 
def run_migrations_offline() -> None:
    """Run migrations without a live database connection ('offline' mode).

    The context is configured with only a database URL rather than an
    Engine, so no DBAPI needs to be installed; statements produced by
    context.execute() are emitted to the script output.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations against a live database ('online' mode).

    Builds an Engine from the config file's [alembic] section and binds
    a connection to the migration context.
    """
    ini_section = config.get_section(config.config_ini_section, {})
    engine = engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires Python >= 3.9 (for zoneinfo) or the backports.zoneinfo library, plus the tzdata library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
43 | # version_locations = [ 44 | # "%(here)s/alembic/versions", 45 | # "%(here)s/foo/bar" 46 | # ] 47 | 48 | 49 | # set to 'true' to search source files recursively 50 | # in each "version_locations" directory 51 | # new in Alembic version 1.10 52 | # recursive_version_locations = false 53 | 54 | # the output encoding used when revision files 55 | # are written from script.py.mako 56 | # output_encoding = "utf-8" 57 | 58 | # This section defines scripts or Python functions that are run 59 | # on newly generated revision scripts. See the documentation for further 60 | # detail and examples 61 | # [[tool.alembic.post_write_hooks]] 62 | # format using "black" - use the console_scripts runner, 63 | # against the "black" entrypoint 64 | # name = "black" 65 | # type = "console_scripts" 66 | # entrypoint = "black" 67 | # options = "-l 79 REVISION_SCRIPT_FILENAME" 68 | # 69 | # [[tool.alembic.post_write_hooks]] 70 | # lint with attempts to fix using "ruff" - use the exec runner, 71 | # execute a binary 72 | # name = "ruff" 73 | # type = "exec" 74 | # executable = "%(here)s/.venv/bin/ruff" 75 | # options = "check --fix REVISION_SCRIPT_FILENAME" 76 | 77 | -------------------------------------------------------------------------------- /alembic/templates/pyproject/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | ${imports if imports else ""} 13 | 14 | # revision identifiers, used by Alembic. 
15 | revision: str = ${repr(up_revision)} 16 | down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} 17 | branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} 18 | depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} 19 | 20 | 21 | def upgrade() -> None: 22 | """Upgrade schema.""" 23 | ${upgrades if upgrades else "pass"} 24 | 25 | 26 | def downgrade() -> None: 27 | """Downgrade schema.""" 28 | ${downgrades if downgrades else "pass"} 29 | -------------------------------------------------------------------------------- /alembic/testing/__init__.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.testing import config 2 | from sqlalchemy.testing import emits_warning 3 | from sqlalchemy.testing import engines 4 | from sqlalchemy.testing import exclusions 5 | from sqlalchemy.testing import mock 6 | from sqlalchemy.testing import provide_metadata 7 | from sqlalchemy.testing import skip_if 8 | from sqlalchemy.testing import uses_deprecated 9 | from sqlalchemy.testing.config import combinations 10 | from sqlalchemy.testing.config import fixture 11 | from sqlalchemy.testing.config import requirements as requires 12 | from sqlalchemy.testing.config import variation 13 | 14 | from .assertions import assert_raises 15 | from .assertions import assert_raises_message 16 | from .assertions import emits_python_deprecation_warning 17 | from .assertions import eq_ 18 | from .assertions import eq_ignore_whitespace 19 | from .assertions import expect_deprecated 20 | from .assertions import expect_raises 21 | from .assertions import expect_raises_message 22 | from .assertions import expect_sqlalchemy_deprecated 23 | from .assertions import expect_sqlalchemy_deprecated_20 24 | from .assertions import expect_warnings 25 | from .assertions import is_ 26 | from .assertions import is_false 27 | from .assertions import is_not_ 28 | from .assertions import is_true 29 | from .assertions import 
def _assert_proper_exception_context(exception):
    """assert that any exception we're catching does not have a __context__
    without a __cause__, and that __suppress_context__ is never set.

    Python 3 will report nested as exceptions as "during the handling of
    error X, error Y occurred". That's not what we want to do. we want
    these exceptions in a cause chain.

    """

    if (
        exception.__context__ is not exception.__cause__
        and not exception.__suppress_context__
    ):
        assert False, (
            "Exception %r was correctly raised but did not set a cause, "
            "within context %r as its cause."
            % (exception, exception.__context__)
        )


def assert_raises(except_cls, callable_, *args, **kw):
    # check_context=True additionally validates proper exception chaining
    # via _assert_proper_exception_context
    return _assert_raises(except_cls, callable_, args, kw, check_context=True)


def assert_raises_context_ok(except_cls, callable_, *args, **kw):
    # variant that skips the __cause__/__context__ chaining check
    return _assert_raises(except_cls, callable_, args, kw)


def assert_raises_message(except_cls, msg, callable_, *args, **kwargs):
    # msg is a regular expression searched against str(exception)
    return _assert_raises(
        except_cls, callable_, args, kwargs, msg=msg, check_context=True
    )


def assert_raises_message_context_ok(
    except_cls, msg, callable_, *args, **kwargs
):
    # message-matching variant without the chaining check
    return _assert_raises(except_cls, callable_, args, kwargs, msg=msg)


def _assert_raises(
    except_cls, callable_, args, kwargs, msg=None, check_context=False
):
    # shared implementation behind the assert_raises* helpers; returns the
    # caught exception so callers can inspect it further
    with _expect_raises(except_cls, msg, check_context) as ec:
        callable_(*args, **kwargs)
    return ec.error


class _ErrorContainer:
    # mutable holder letting _expect_raises hand the caught exception back
    # to its caller after the with-block exits
    error: Any = None


@contextlib.contextmanager
def _expect_raises(
    except_cls, msg=None, check_context=False, text_exact=False
):
    """Context manager asserting that the enclosed block raises
    ``except_cls``.

    Yields an ``_ErrorContainer`` whose ``.error`` attribute holds the
    caught exception afterwards.  ``msg``, if given, is compared against
    the exception string -- exactly when ``text_exact`` is true,
    otherwise as a regular expression.
    """
    ec = _ErrorContainer()
    if check_context:
        # if we are already inside an exception handler, the chaining
        # check would false-positive, so it is skipped further below
        are_we_already_in_a_traceback = sys.exc_info()[0]
    try:
        yield ec
        # reaching here means nothing was raised; flag checked at the end
        success = False
    except except_cls as err:
        ec.error = err
        success = True
        if msg is not None:
            if text_exact:
                assert str(err) == msg, f"{msg} != {err}"
            else:
                assert re.search(msg, str(err), re.UNICODE), f"{msg} !~ {err}"
        if check_context and not are_we_already_in_a_traceback:
            _assert_proper_exception_context(err)
        print(str(err).encode("utf-8"))

    # assert outside the block so it works for AssertionError too !
    assert success, "Callable did not raise an exception"
def expect_raises(except_cls, check_context=True):
    """Context manager asserting ``except_cls`` is raised within the block."""
    return _expect_raises(except_cls, check_context=check_context)


def expect_raises_message(
    except_cls, msg, check_context=True, text_exact=False
):
    """Like :func:`expect_raises`, additionally matching the exception
    message: exactly when ``text_exact`` is true, else as a regex."""
    return _expect_raises(
        except_cls, msg=msg, check_context=check_context, text_exact=text_exact
    )


def eq_ignore_whitespace(a, b, msg=None):
    r"""Assert two strings are equal, ignoring leading whitespace,
    newlines, and runs of multiple spaces.

    Fix: the original pattern used a lazy quantifier (``^\s+?``) which
    removed only a single leading whitespace character; ``^\s+`` strips
    the entire leading run, which is the evident intent.
    """
    a = re.sub(r"^\s+|\n", "", a)
    a = re.sub(r" {2,}", " ", a)
    b = re.sub(r"^\s+|\n", "", b)
    b = re.sub(r" {2,}", " ", b)

    assert a == b, msg or "%r != %r" % (a, b)


# cache keyed by dialect name; populated externally as needed
_dialect_mods: Dict[Any, Any] = {}


def _get_dialect(name):
    """Return a dialect instance for ``name``; a DefaultDialect when
    ``name`` is None or 'default'."""
    if name is None or name == "default":
        return default.DefaultDialect()
    else:
        d = URL.create(name).get_dialect()()

        # per-dialect tweaks matching the test suite's expectations
        if name == "postgresql":
            d.implicit_returning = True
        elif name == "mssql":
            d.legacy_schema_aliasing = False
        return d


def expect_warnings(*messages, **kw):
    """Context manager which expects one or more warnings.

    With no arguments, squelches all SAWarnings emitted via
    sqlalchemy.util.warn and sqlalchemy.util.warn_limited.  Otherwise
    pass string expressions that will match selected warnings via regex;
    all non-matching warnings are sent through.

    The expect version **asserts** that the warnings were in fact seen.

    Note that the test suite sets SAWarning warnings to raise exceptions.

    """
    return _expect_warnings(Warning, messages, **kw)
def emits_python_deprecation_warning(*messages):
    """Decorator form of expect_warnings().

    Note that emits_warning does **not** assert that the warnings
    were in fact seen.

    """

    @decorator
    def decorate(fn, *args, **kw):
        # pass ``messages`` as a single sequence, matching every other
        # _expect_warnings caller in this module.  The previous
        # ``*messages`` unpacking bound only the first message string to
        # the ``messages`` parameter (which is then iterated per
        # character) and spilled any further messages into unrelated
        # positional parameters.
        with _expect_warnings(DeprecationWarning, messages, assert_=False):
            return fn(*args, **kw)

    return decorate


def expect_deprecated(*messages, **kw):
    """Context manager expecting DeprecationWarnings matching ``messages``."""
    return _expect_warnings(DeprecationWarning, messages, **kw)


def expect_sqlalchemy_deprecated(*messages, **kw):
    """Context manager expecting SQLAlchemy SADeprecationWarning."""
    return _expect_warnings(sa_exc.SADeprecationWarning, messages, **kw)


def expect_sqlalchemy_deprecated_20(*messages, **kw):
    """Context manager expecting SQLAlchemy RemovedIn20Warning."""
    return _expect_warnings(sa_exc.RemovedIn20Warning, messages, **kw)
class SuiteRequirements(Requirements):
    """Backend capability flags consumed by the alembic test suite.

    Each property returns an ``exclusions`` rule: ``exclusions.open()``
    means the feature is assumed available, ``exclusions.closed()``
    means dependent tests are skipped.  Dialect-specific subclasses
    override individual properties.
    """

    @property
    def schemas(self):
        """Target database must support external schemas, and have one
        named 'test_schema'."""

        return exclusions.open()

    @property
    def autocommit_isolation(self):
        """target database should support 'AUTOCOMMIT' isolation level"""

        return exclusions.closed()

    @property
    def materialized_views(self):
        """needed for sqlalchemy compat"""
        return exclusions.closed()

    @property
    def unique_constraint_reflection(self):
        # probes the backend's inspector with a dummy table name; only the
        # *kind* of failure matters, not the reflection result itself
        def doesnt_have_check_uq_constraints(config):
            from sqlalchemy import inspect

            insp = inspect(config.db)
            try:
                insp.get_unique_constraints("x")
            except NotImplementedError:
                return True
            except TypeError:
                return True
            except Exception:
                # any other error (e.g. no such table) still implies the
                # API itself is implemented
                pass
            return False

        return exclusions.skip_if(doesnt_have_check_uq_constraints)

    @property
    def sequences(self):
        """Target database must support SEQUENCEs."""

        return exclusions.only_if(
            [lambda config: config.db.dialect.supports_sequences],
            "no sequence support",
        )

    @property
    def foreign_key_match(self):
        # presumably the FK MATCH clause -- confirm against dialect usage
        return exclusions.open()

    @property
    def foreign_key_constraint_reflection(self):
        return exclusions.open()

    @property
    def check_constraints_w_enforcement(self):
        """Target database must support check constraints
        and also enforce them."""

        return exclusions.open()

    @property
    def reflects_pk_names(self):
        # whether primary key constraint names come back from reflection
        return exclusions.closed()

    @property
    def reflects_fk_options(self):
        # whether FK ON UPDATE / ON DELETE options come back from reflection
        return exclusions.closed()

    @property
    def sqlalchemy_1x(self):
        # test runs only under SQLAlchemy 1.x
        return exclusions.skip_if(
            lambda config: util.sqla_2,
            "SQLAlchemy 1.x test",
        )

    @property
    def sqlalchemy_2(self):
        # test runs only under SQLAlchemy 2.x
        return exclusions.skip_if(
            lambda config: not util.sqla_2,
            "SQLAlchemy 2.x test",
        )

    @property
    def asyncio(self):
        # asyncio support requires greenlet to be importable
        def go(config):
            try:
                import greenlet  # noqa: F401
            except ImportError:
                return False
            else:
                return True

        return exclusions.only_if(go)

    @property
    def comments(self):
        # table/column COMMENT support, as reported by the dialect
        return exclusions.only_if(
            lambda config: config.db.dialect.supports_comments
        )

    @property
    def alter_column(self):
        return exclusions.open()

    @property
    def computed_columns(self):
        return exclusions.closed()

    @property
    def autoincrement_on_composite_pk(self):
        return exclusions.closed()

    @property
    def fk_ondelete_is_reflected(self):
        return exclusions.closed()

    @property
    def fk_onupdate_is_reflected(self):
        return exclusions.closed()

    @property
    def fk_onupdate(self):
        return exclusions.open()

    @property
    def fk_ondelete_restrict(self):
        return exclusions.open()

    @property
    def fk_onupdate_restrict(self):
        return exclusions.open()

    @property
    def fk_ondelete_noaction(self):
        return exclusions.open()

    @property
    def fk_initially(self):
        return exclusions.closed()

    @property
    def fk_deferrable(self):
        return exclusions.closed()

    @property
    def fk_deferrable_is_reflected(self):
        return exclusions.closed()

    @property
    def fk_names(self):
        # whether foreign key constraint names are reflected
        return exclusions.open()

    @property
    def integer_subtype_comparisons(self):
        return exclusions.open()
class CompareTable:
    """Equality adapter matching a Table against another by name, schema,
    and a column-by-column comparison via CompareColumn."""

    def __init__(self, table):
        self.table = table

    def __eq__(self, other):
        mine = self.table
        if mine.name != other.name or mine.schema != other.schema:
            return False

        for ours, theirs in zip_longest(mine.c, other.c):
            # differing column counts: exactly one side is exhausted
            if (ours is None) != (theirs is None):
                return False
            if CompareColumn(ours) != theirs:
                return False

        return True

    # TODO: compare constraints, indexes

    def __ne__(self, other):
        return not self.__eq__(other)


class CompareColumn:
    """Equality adapter comparing a Column by name and nullability only."""

    def __init__(self, column):
        self.column = column

    def __eq__(self, other):
        # TODO: datatypes etc
        return (
            self.column.name == other.name
            and self.column.nullable == other.nullable
        )

    def __ne__(self, other):
        return not self.__eq__(other)
other.dialect_kwargs 59 | ) 60 | 61 | def __ne__(self, other): 62 | return not self.__eq__(other) 63 | 64 | def __repr__(self): 65 | expr = ClauseList(*self.index.expressions) 66 | try: 67 | expr_str = expr.compile().string 68 | except Exception: 69 | expr_str = str(expr) 70 | return f"" 71 | 72 | 73 | class CompareCheckConstraint: 74 | def __init__(self, constraint): 75 | self.constraint = constraint 76 | 77 | def __eq__(self, other): 78 | return ( 79 | isinstance(other, schema.CheckConstraint) 80 | and self.constraint.name == other.name 81 | and (str(self.constraint.sqltext) == str(other.sqltext)) 82 | and (other.table.name == self.constraint.table.name) 83 | and other.table.schema == self.constraint.table.schema 84 | ) 85 | 86 | def __ne__(self, other): 87 | return not self.__eq__(other) 88 | 89 | 90 | class CompareForeignKey: 91 | def __init__(self, constraint): 92 | self.constraint = constraint 93 | 94 | def __eq__(self, other): 95 | r1 = ( 96 | isinstance(other, schema.ForeignKeyConstraint) 97 | and self.constraint.name == other.name 98 | and (other.table.name == self.constraint.table.name) 99 | and other.table.schema == self.constraint.table.schema 100 | ) 101 | if not r1: 102 | return False 103 | for c1, c2 in zip_longest(self.constraint.columns, other.columns): 104 | if (c1 is None and c2 is not None) or ( 105 | c2 is None and c1 is not None 106 | ): 107 | return False 108 | if CompareColumn(c1) != c2: 109 | return False 110 | return True 111 | 112 | def __ne__(self, other): 113 | return not self.__eq__(other) 114 | 115 | 116 | class ComparePrimaryKey: 117 | def __init__(self, constraint): 118 | self.constraint = constraint 119 | 120 | def __eq__(self, other): 121 | r1 = ( 122 | isinstance(other, schema.PrimaryKeyConstraint) 123 | and self.constraint.name == other.name 124 | and (other.table.name == self.constraint.table.name) 125 | and other.table.schema == self.constraint.table.schema 126 | ) 127 | if not r1: 128 | return False 129 | 130 | for c1, c2 in 
class CompareUniqueConstraint:
    """Equality adapter matching a UniqueConstraint by name, owning table,
    and a column-by-column comparison via CompareColumn."""

    def __init__(self, constraint):
        self.constraint = constraint

    def __eq__(self, other):
        mine = self.constraint
        same_header = (
            isinstance(other, schema.UniqueConstraint)
            and mine.name == other.name
            and other.table.name == mine.table.name
            and other.table.schema == mine.table.schema
        )
        if not same_header:
            return False

        for ours, theirs in zip_longest(mine.columns, other.columns):
            # differing column counts: exactly one side is exhausted
            if (ours is None) != (theirs is None):
                return False
            if CompareColumn(ours) != theirs:
                return False

        return True

    def __ne__(self, other):
        return not self.__eq__(other)
    def test_existing_table_comment_no_change(self):
        """Identical table comments on both sides produce no diffs."""
        m1 = MetaData()
        m2 = MetaData()

        Table(
            "some_table",
            m1,
            Column("test", String(10), primary_key=True),
            comment="this is some table",
        )

        Table(
            "some_table",
            m2,
            Column("test", String(10), primary_key=True),
            comment="this is some table",
        )

        diffs = self._fixture(m1, m2)

        eq_(diffs, [])

    def test_add_table_comment(self):
        """Adding a comment to an existing table emits add_table_comment."""
        m1 = MetaData()
        m2 = MetaData()

        Table("some_table", m1, Column("test", String(10), primary_key=True))

        Table(
            "some_table",
            m2,
            Column("test", String(10), primary_key=True),
            comment="this is some table",
        )

        diffs = self._fixture(m1, m2)

        eq_(diffs[0][0], "add_table_comment")
        eq_(diffs[0][1].comment, "this is some table")
        # third tuple element is None here; presumably the prior comment,
        # of which there was none -- see test_alter_table_comment
        eq_(diffs[0][2], None)

    def test_remove_table_comment(self):
        """Dropping a table comment emits remove_table_comment."""
        m1 = MetaData()
        m2 = MetaData()

        Table(
            "some_table",
            m1,
            Column("test", String(10), primary_key=True),
            comment="this is some table",
        )

        Table("some_table", m2, Column("test", String(10), primary_key=True))

        diffs = self._fixture(m1, m2)

        eq_(diffs[0][0], "remove_table_comment")
        eq_(diffs[0][1].comment, None)
) 94 | 95 | diffs = self._fixture(m1, m2) 96 | 97 | eq_(diffs[0][0], "add_table_comment") 98 | eq_(diffs[0][1].comment, "this is also some table") 99 | eq_(diffs[0][2], "this is some table") 100 | 101 | def test_existing_column_comment_no_change(self): 102 | m1 = MetaData() 103 | m2 = MetaData() 104 | 105 | Table( 106 | "some_table", 107 | m1, 108 | Column("test", String(10), primary_key=True), 109 | Column("amount", Float, comment="the amount"), 110 | ) 111 | 112 | Table( 113 | "some_table", 114 | m2, 115 | Column("test", String(10), primary_key=True), 116 | Column("amount", Float, comment="the amount"), 117 | ) 118 | 119 | diffs = self._fixture(m1, m2) 120 | 121 | eq_(diffs, []) 122 | 123 | def test_add_column_comment(self): 124 | m1 = MetaData() 125 | m2 = MetaData() 126 | 127 | Table( 128 | "some_table", 129 | m1, 130 | Column("test", String(10), primary_key=True), 131 | Column("amount", Float), 132 | ) 133 | 134 | Table( 135 | "some_table", 136 | m2, 137 | Column("test", String(10), primary_key=True), 138 | Column("amount", Float, comment="the amount"), 139 | ) 140 | 141 | diffs = self._fixture(m1, m2) 142 | eq_( 143 | diffs, 144 | [ 145 | [ 146 | ( 147 | "modify_comment", 148 | None, 149 | "some_table", 150 | "amount", 151 | { 152 | "existing_nullable": True, 153 | "existing_type": mock.ANY, 154 | "existing_server_default": False, 155 | }, 156 | None, 157 | "the amount", 158 | ) 159 | ] 160 | ], 161 | ) 162 | 163 | def test_remove_column_comment(self): 164 | m1 = MetaData() 165 | m2 = MetaData() 166 | 167 | Table( 168 | "some_table", 169 | m1, 170 | Column("test", String(10), primary_key=True), 171 | Column("amount", Float, comment="the amount"), 172 | ) 173 | 174 | Table( 175 | "some_table", 176 | m2, 177 | Column("test", String(10), primary_key=True), 178 | Column("amount", Float), 179 | ) 180 | 181 | diffs = self._fixture(m1, m2) 182 | eq_( 183 | diffs, 184 | [ 185 | [ 186 | ( 187 | "modify_comment", 188 | None, 189 | "some_table", 190 | "amount", 191 | { 
192 | "existing_nullable": True, 193 | "existing_type": mock.ANY, 194 | "existing_server_default": False, 195 | }, 196 | "the amount", 197 | None, 198 | ) 199 | ] 200 | ], 201 | ) 202 | 203 | def test_alter_column_comment(self): 204 | m1 = MetaData() 205 | m2 = MetaData() 206 | 207 | Table( 208 | "some_table", 209 | m1, 210 | Column("test", String(10), primary_key=True), 211 | Column("amount", Float, comment="the amount"), 212 | ) 213 | 214 | Table( 215 | "some_table", 216 | m2, 217 | Column("test", String(10), primary_key=True), 218 | Column("amount", Float, comment="the adjusted amount"), 219 | ) 220 | 221 | diffs = self._fixture(m1, m2) 222 | 223 | eq_( 224 | diffs, 225 | [ 226 | [ 227 | ( 228 | "modify_comment", 229 | None, 230 | "some_table", 231 | "amount", 232 | { 233 | "existing_nullable": True, 234 | "existing_type": mock.ANY, 235 | "existing_server_default": False, 236 | }, 237 | "the amount", 238 | "the adjusted amount", 239 | ) 240 | ] 241 | ], 242 | ) 243 | -------------------------------------------------------------------------------- /alembic/testing/suite/test_autogen_computed.py: -------------------------------------------------------------------------------- 1 | import sqlalchemy as sa 2 | from sqlalchemy import Column 3 | from sqlalchemy import Integer 4 | from sqlalchemy import MetaData 5 | from sqlalchemy import Table 6 | 7 | from ._autogen_fixtures import AutogenFixtureTest 8 | from ... 
import testing 9 | from ...testing import eq_ 10 | from ...testing import is_ 11 | from ...testing import is_true 12 | from ...testing import mock 13 | from ...testing import TestBase 14 | 15 | 16 | class AutogenerateComputedTest(AutogenFixtureTest, TestBase): 17 | __requires__ = ("computed_columns",) 18 | __backend__ = True 19 | 20 | def test_add_computed_column(self): 21 | m1 = MetaData() 22 | m2 = MetaData() 23 | 24 | Table("user", m1, Column("id", Integer, primary_key=True)) 25 | 26 | Table( 27 | "user", 28 | m2, 29 | Column("id", Integer, primary_key=True), 30 | Column("foo", Integer, sa.Computed("5")), 31 | ) 32 | 33 | diffs = self._fixture(m1, m2) 34 | 35 | eq_(diffs[0][0], "add_column") 36 | eq_(diffs[0][2], "user") 37 | eq_(diffs[0][3].name, "foo") 38 | c = diffs[0][3].computed 39 | 40 | is_true(isinstance(c, sa.Computed)) 41 | is_(c.persisted, None) 42 | eq_(str(c.sqltext), "5") 43 | 44 | def test_remove_computed_column(self): 45 | m1 = MetaData() 46 | m2 = MetaData() 47 | 48 | Table( 49 | "user", 50 | m1, 51 | Column("id", Integer, primary_key=True), 52 | Column("foo", Integer, sa.Computed("5")), 53 | ) 54 | 55 | Table("user", m2, Column("id", Integer, primary_key=True)) 56 | 57 | diffs = self._fixture(m1, m2) 58 | 59 | eq_(diffs[0][0], "remove_column") 60 | eq_(diffs[0][2], "user") 61 | c = diffs[0][3] 62 | eq_(c.name, "foo") 63 | 64 | is_true(isinstance(c.computed, sa.Computed)) 65 | is_true(isinstance(c.server_default, sa.Computed)) 66 | 67 | @testing.combinations( 68 | lambda: (None, sa.Computed("bar*5")), 69 | (lambda: (sa.Computed("bar*5"), None)), 70 | lambda: ( 71 | sa.Computed("bar*5"), 72 | sa.Computed("bar * 42", persisted=True), 73 | ), 74 | lambda: (sa.Computed("bar*5"), sa.Computed("bar * 42")), 75 | ) 76 | def test_cant_change_computed_warning(self, test_case): 77 | arg_before, arg_after = testing.resolve_lambda(test_case, **locals()) 78 | m1 = MetaData() 79 | m2 = MetaData() 80 | 81 | arg_before = [] if arg_before is None else 
[arg_before] 82 | arg_after = [] if arg_after is None else [arg_after] 83 | 84 | Table( 85 | "user", 86 | m1, 87 | Column("id", Integer, primary_key=True), 88 | Column("bar", Integer), 89 | Column("foo", Integer, *arg_before), 90 | ) 91 | 92 | Table( 93 | "user", 94 | m2, 95 | Column("id", Integer, primary_key=True), 96 | Column("bar", Integer), 97 | Column("foo", Integer, *arg_after), 98 | ) 99 | 100 | with mock.patch("alembic.util.warn") as mock_warn: 101 | diffs = self._fixture(m1, m2) 102 | 103 | eq_( 104 | mock_warn.mock_calls, 105 | [mock.call("Computed default on user.foo cannot be modified")], 106 | ) 107 | 108 | eq_(list(diffs), []) 109 | 110 | @testing.combinations( 111 | lambda: (None, None), 112 | lambda: (sa.Computed("5"), sa.Computed("5")), 113 | lambda: (sa.Computed("bar*5"), sa.Computed("bar*5")), 114 | lambda: (sa.Computed("bar*5"), sa.Computed("bar * \r\n\t5")), 115 | ) 116 | def test_computed_unchanged(self, test_case): 117 | arg_before, arg_after = testing.resolve_lambda(test_case, **locals()) 118 | m1 = MetaData() 119 | m2 = MetaData() 120 | 121 | arg_before = [] if arg_before is None else [arg_before] 122 | arg_after = [] if arg_after is None else [arg_after] 123 | 124 | Table( 125 | "user", 126 | m1, 127 | Column("id", Integer, primary_key=True), 128 | Column("bar", Integer), 129 | Column("foo", Integer, *arg_before), 130 | ) 131 | 132 | Table( 133 | "user", 134 | m2, 135 | Column("id", Integer, primary_key=True), 136 | Column("bar", Integer), 137 | Column("foo", Integer, *arg_after), 138 | ) 139 | 140 | with mock.patch("alembic.util.warn") as mock_warn: 141 | diffs = self._fixture(m1, m2) 142 | eq_(mock_warn.mock_calls, []) 143 | 144 | eq_(list(diffs), []) 145 | -------------------------------------------------------------------------------- /alembic/testing/suite/test_autogen_identity.py: -------------------------------------------------------------------------------- 1 | import sqlalchemy as sa 2 | from sqlalchemy import Column 3 | from 
sqlalchemy import Integer 4 | from sqlalchemy import MetaData 5 | from sqlalchemy import Table 6 | 7 | from alembic.util import sqla_compat 8 | from ._autogen_fixtures import AutogenFixtureTest 9 | from ... import testing 10 | from ...testing import config 11 | from ...testing import eq_ 12 | from ...testing import is_true 13 | from ...testing import TestBase 14 | 15 | 16 | class AutogenerateIdentityTest(AutogenFixtureTest, TestBase): 17 | __requires__ = ("identity_columns",) 18 | __backend__ = True 19 | 20 | def test_add_identity_column(self): 21 | m1 = MetaData() 22 | m2 = MetaData() 23 | 24 | Table("user", m1, Column("other", sa.Text)) 25 | 26 | Table( 27 | "user", 28 | m2, 29 | Column("other", sa.Text), 30 | Column( 31 | "id", 32 | Integer, 33 | sa.Identity(start=5, increment=7), 34 | primary_key=True, 35 | ), 36 | ) 37 | 38 | diffs = self._fixture(m1, m2) 39 | 40 | eq_(diffs[0][0], "add_column") 41 | eq_(diffs[0][2], "user") 42 | eq_(diffs[0][3].name, "id") 43 | i = diffs[0][3].identity 44 | 45 | is_true(isinstance(i, sa.Identity)) 46 | eq_(i.start, 5) 47 | eq_(i.increment, 7) 48 | 49 | def test_remove_identity_column(self): 50 | m1 = MetaData() 51 | m2 = MetaData() 52 | 53 | Table( 54 | "user", 55 | m1, 56 | Column( 57 | "id", 58 | Integer, 59 | sa.Identity(start=2, increment=3), 60 | primary_key=True, 61 | ), 62 | ) 63 | 64 | Table("user", m2) 65 | 66 | diffs = self._fixture(m1, m2) 67 | 68 | eq_(diffs[0][0], "remove_column") 69 | eq_(diffs[0][2], "user") 70 | c = diffs[0][3] 71 | eq_(c.name, "id") 72 | 73 | is_true(isinstance(c.identity, sa.Identity)) 74 | eq_(c.identity.start, 2) 75 | eq_(c.identity.increment, 3) 76 | 77 | def test_no_change_identity_column(self): 78 | m1 = MetaData() 79 | m2 = MetaData() 80 | 81 | for m in (m1, m2): 82 | id_ = sa.Identity(start=2) 83 | Table("user", m, Column("id", Integer, id_)) 84 | 85 | diffs = self._fixture(m1, m2) 86 | 87 | eq_(diffs, []) 88 | 89 | def test_dialect_kwargs_changes(self): 90 | m1 = MetaData() 91 | m2 = 
MetaData() 92 | 93 | if sqla_compat.identity_has_dialect_kwargs: 94 | args = {"oracle_on_null": True, "oracle_order": True} 95 | else: 96 | args = {"on_null": True, "order": True} 97 | 98 | Table("user", m1, Column("id", Integer, sa.Identity(start=2))) 99 | id_ = sa.Identity(start=2, **args) 100 | Table("user", m2, Column("id", Integer, id_)) 101 | 102 | diffs = self._fixture(m1, m2) 103 | if config.db.name == "oracle": 104 | is_true(len(diffs), 1) 105 | eq_(diffs[0][0][0], "modify_default") 106 | else: 107 | eq_(diffs, []) 108 | 109 | @testing.combinations( 110 | (None, dict(start=2)), 111 | (dict(start=2), None), 112 | (dict(start=2), dict(start=2, increment=7)), 113 | (dict(always=False), dict(always=True)), 114 | ( 115 | dict(start=1, minvalue=0, maxvalue=100, cycle=True), 116 | dict(start=1, minvalue=0, maxvalue=100, cycle=False), 117 | ), 118 | ( 119 | dict(start=10, increment=3, maxvalue=9999), 120 | dict(start=10, increment=1, maxvalue=3333), 121 | ), 122 | ) 123 | @config.requirements.identity_columns_alter 124 | def test_change_identity(self, before, after): 125 | arg_before = (sa.Identity(**before),) if before else () 126 | arg_after = (sa.Identity(**after),) if after else () 127 | 128 | m1 = MetaData() 129 | m2 = MetaData() 130 | 131 | Table( 132 | "user", 133 | m1, 134 | Column("id", Integer, *arg_before), 135 | Column("other", sa.Text), 136 | ) 137 | 138 | Table( 139 | "user", 140 | m2, 141 | Column("id", Integer, *arg_after), 142 | Column("other", sa.Text), 143 | ) 144 | 145 | diffs = self._fixture(m1, m2) 146 | 147 | eq_(len(diffs[0]), 1) 148 | diffs = diffs[0][0] 149 | eq_(diffs[0], "modify_default") 150 | eq_(diffs[2], "user") 151 | eq_(diffs[3], "id") 152 | old = diffs[5] 153 | new = diffs[6] 154 | 155 | def check(kw, idt): 156 | if kw: 157 | is_true(isinstance(idt, sa.Identity)) 158 | for k, v in kw.items(): 159 | eq_(getattr(idt, k), v) 160 | else: 161 | is_true(idt in (None, False)) 162 | 163 | check(before, old) 164 | check(after, new) 165 | 
166 | def test_add_identity_to_column(self): 167 | m1 = MetaData() 168 | m2 = MetaData() 169 | 170 | Table( 171 | "user", 172 | m1, 173 | Column("id", Integer), 174 | Column("other", sa.Text), 175 | ) 176 | 177 | Table( 178 | "user", 179 | m2, 180 | Column("id", Integer, sa.Identity(start=2, maxvalue=1000)), 181 | Column("other", sa.Text), 182 | ) 183 | 184 | diffs = self._fixture(m1, m2) 185 | 186 | eq_(len(diffs[0]), 1) 187 | diffs = diffs[0][0] 188 | eq_(diffs[0], "modify_default") 189 | eq_(diffs[2], "user") 190 | eq_(diffs[3], "id") 191 | eq_(diffs[5], None) 192 | added = diffs[6] 193 | 194 | is_true(isinstance(added, sa.Identity)) 195 | eq_(added.start, 2) 196 | eq_(added.maxvalue, 1000) 197 | 198 | def test_remove_identity_from_column(self): 199 | m1 = MetaData() 200 | m2 = MetaData() 201 | 202 | Table( 203 | "user", 204 | m1, 205 | Column("id", Integer, sa.Identity(start=2, maxvalue=1000)), 206 | Column("other", sa.Text), 207 | ) 208 | 209 | Table( 210 | "user", 211 | m2, 212 | Column("id", Integer), 213 | Column("other", sa.Text), 214 | ) 215 | 216 | diffs = self._fixture(m1, m2) 217 | 218 | eq_(len(diffs[0]), 1) 219 | diffs = diffs[0][0] 220 | eq_(diffs[0], "modify_default") 221 | eq_(diffs[2], "user") 222 | eq_(diffs[3], "id") 223 | eq_(diffs[6], None) 224 | removed = diffs[5] 225 | 226 | is_true(isinstance(removed, sa.Identity)) 227 | -------------------------------------------------------------------------------- /alembic/testing/suite/test_op.py: -------------------------------------------------------------------------------- 1 | """Test against the builders in the op.* module.""" 2 | 3 | from sqlalchemy import Column 4 | from sqlalchemy import event 5 | from sqlalchemy import Integer 6 | from sqlalchemy import String 7 | from sqlalchemy import Table 8 | from sqlalchemy.sql import text 9 | 10 | from ...testing.fixtures import AlterColRoundTripFixture 11 | from ...testing.fixtures import TestBase 12 | 13 | 14 | @event.listens_for(Table, 
"after_parent_attach") 15 | def _add_cols(table, metadata): 16 | if table.name == "tbl_with_auto_appended_column": 17 | table.append_column(Column("bat", Integer)) 18 | 19 | 20 | class BackendAlterColumnTest(AlterColRoundTripFixture, TestBase): 21 | __backend__ = True 22 | 23 | def test_rename_column(self): 24 | self._run_alter_col({}, {"name": "newname"}) 25 | 26 | def test_modify_type_int_str(self): 27 | self._run_alter_col({"type": Integer()}, {"type": String(50)}) 28 | 29 | def test_add_server_default_int(self): 30 | self._run_alter_col({"type": Integer}, {"server_default": text("5")}) 31 | 32 | def test_modify_server_default_int(self): 33 | self._run_alter_col( 34 | {"type": Integer, "server_default": text("2")}, 35 | {"server_default": text("5")}, 36 | ) 37 | 38 | def test_modify_nullable_to_non(self): 39 | self._run_alter_col({}, {"nullable": False}) 40 | 41 | def test_modify_non_nullable_to_nullable(self): 42 | self._run_alter_col({"nullable": False}, {"nullable": True}) 43 | -------------------------------------------------------------------------------- /alembic/testing/util.py: -------------------------------------------------------------------------------- 1 | # testing/util.py 2 | # Copyright (C) 2005-2019 the SQLAlchemy authors and contributors 3 | # 4 | # 5 | # This module is part of SQLAlchemy and is released under 6 | # the MIT License: http://www.opensource.org/licenses/mit-license.php 7 | from __future__ import annotations 8 | 9 | import types 10 | from typing import Union 11 | 12 | from sqlalchemy.util import inspect_getfullargspec 13 | 14 | from ..util import sqla_2 15 | 16 | 17 | def flag_combinations(*combinations): 18 | """A facade around @testing.combinations() oriented towards boolean 19 | keyword-based arguments. 20 | 21 | Basically generates a nice looking identifier based on the keywords 22 | and also sets up the argument names. 
23 | 24 | E.g.:: 25 | 26 | @testing.flag_combinations( 27 | dict(lazy=False, passive=False), 28 | dict(lazy=True, passive=False), 29 | dict(lazy=False, passive=True), 30 | dict(lazy=False, passive=True, raiseload=True), 31 | ) 32 | 33 | 34 | would result in:: 35 | 36 | @testing.combinations( 37 | ('', False, False, False), 38 | ('lazy', True, False, False), 39 | ('lazy_passive', True, True, False), 40 | ('lazy_passive', True, True, True), 41 | id_='iaaa', 42 | argnames='lazy,passive,raiseload' 43 | ) 44 | 45 | """ 46 | from sqlalchemy.testing import config 47 | 48 | keys = set() 49 | 50 | for d in combinations: 51 | keys.update(d) 52 | 53 | keys = sorted(keys) 54 | 55 | return config.combinations( 56 | *[ 57 | ("_".join(k for k in keys if d.get(k, False)),) 58 | + tuple(d.get(k, False) for k in keys) 59 | for d in combinations 60 | ], 61 | id_="i" + ("a" * len(keys)), 62 | argnames=",".join(keys), 63 | ) 64 | 65 | 66 | def resolve_lambda(__fn, **kw): 67 | """Given a no-arg lambda and a namespace, return a new lambda that 68 | has all the values filled in. 69 | 70 | This is used so that we can have module-level fixtures that 71 | refer to instance-level variables using lambdas. 72 | 73 | """ 74 | 75 | pos_args = inspect_getfullargspec(__fn)[0] 76 | pass_pos_args = {arg: kw.pop(arg) for arg in pos_args} 77 | glb = dict(__fn.__globals__) 78 | glb.update(kw) 79 | new_fn = types.FunctionType(__fn.__code__, glb) 80 | return new_fn(**pass_pos_args) 81 | 82 | 83 | def metadata_fixture(ddl="function"): 84 | """Provide MetaData for a pytest fixture.""" 85 | 86 | from sqlalchemy.testing import config 87 | from . 
import fixture_functions 88 | 89 | def decorate(fn): 90 | def run_ddl(self): 91 | from sqlalchemy import schema 92 | 93 | metadata = self.metadata = schema.MetaData() 94 | try: 95 | result = fn(self, metadata) 96 | metadata.create_all(config.db) 97 | # TODO: 98 | # somehow get a per-function dml erase fixture here 99 | yield result 100 | finally: 101 | metadata.drop_all(config.db) 102 | 103 | return fixture_functions.fixture(scope=ddl)(run_ddl) 104 | 105 | return decorate 106 | 107 | 108 | def _safe_int(value: str) -> Union[int, str]: 109 | try: 110 | return int(value) 111 | except: 112 | return value 113 | 114 | 115 | def testing_engine(url=None, options=None, future=False): 116 | from sqlalchemy.testing import config 117 | from sqlalchemy.testing.engines import testing_engine 118 | 119 | if not future: 120 | future = getattr(config._current.options, "future_engine", False) 121 | 122 | if not sqla_2: 123 | kw = {"future": future} if future else {} 124 | else: 125 | kw = {} 126 | return testing_engine(url, options, **kw) 127 | -------------------------------------------------------------------------------- /alembic/testing/warnings.py: -------------------------------------------------------------------------------- 1 | # testing/warnings.py 2 | # Copyright (C) 2005-2021 the SQLAlchemy authors and contributors 3 | # 4 | # 5 | # This module is part of SQLAlchemy and is released under 6 | # the MIT License: http://www.opensource.org/licenses/mit-license.php 7 | 8 | 9 | import warnings 10 | 11 | from sqlalchemy import exc as sa_exc 12 | 13 | 14 | def setup_filters(): 15 | """Set global warning behavior for the test suite.""" 16 | 17 | warnings.resetwarnings() 18 | 19 | warnings.filterwarnings("error", category=sa_exc.SADeprecationWarning) 20 | warnings.filterwarnings("error", category=sa_exc.SAWarning) 21 | 22 | # some selected deprecations... 
23 | warnings.filterwarnings("error", category=DeprecationWarning) 24 | try: 25 | import pytest 26 | except ImportError: 27 | pass 28 | else: 29 | warnings.filterwarnings( 30 | "once", category=pytest.PytestDeprecationWarning 31 | ) 32 | -------------------------------------------------------------------------------- /alembic/util/__init__.py: -------------------------------------------------------------------------------- 1 | from .editor import open_in_editor as open_in_editor 2 | from .exc import AutogenerateDiffsDetected as AutogenerateDiffsDetected 3 | from .exc import CommandError as CommandError 4 | from .langhelpers import _with_legacy_names as _with_legacy_names 5 | from .langhelpers import asbool as asbool 6 | from .langhelpers import dedupe_tuple as dedupe_tuple 7 | from .langhelpers import Dispatcher as Dispatcher 8 | from .langhelpers import EMPTY_DICT as EMPTY_DICT 9 | from .langhelpers import immutabledict as immutabledict 10 | from .langhelpers import memoized_property as memoized_property 11 | from .langhelpers import ModuleClsProxy as ModuleClsProxy 12 | from .langhelpers import not_none as not_none 13 | from .langhelpers import rev_id as rev_id 14 | from .langhelpers import to_list as to_list 15 | from .langhelpers import to_tuple as to_tuple 16 | from .langhelpers import unique_list as unique_list 17 | from .messaging import err as err 18 | from .messaging import format_as_comma as format_as_comma 19 | from .messaging import msg as msg 20 | from .messaging import obfuscate_url_pw as obfuscate_url_pw 21 | from .messaging import status as status 22 | from .messaging import warn as warn 23 | from .messaging import warn_deprecated as warn_deprecated 24 | from .messaging import write_outstream as write_outstream 25 | from .pyfiles import coerce_resource_to_filename as coerce_resource_to_filename 26 | from .pyfiles import load_python_file as load_python_file 27 | from .pyfiles import pyc_file_from_path as pyc_file_from_path 28 | from .pyfiles import 
template_to_file as template_to_file 29 | from .sqla_compat import sqla_2 as sqla_2 30 | -------------------------------------------------------------------------------- /alembic/util/compat.py: -------------------------------------------------------------------------------- 1 | # mypy: no-warn-unused-ignores 2 | 3 | from __future__ import annotations 4 | 5 | from configparser import ConfigParser 6 | import io 7 | import os 8 | from pathlib import Path 9 | import sys 10 | import typing 11 | from typing import Any 12 | from typing import Iterator 13 | from typing import List 14 | from typing import Optional 15 | from typing import Sequence 16 | from typing import Union 17 | 18 | if True: 19 | # zimports hack for too-long names 20 | from sqlalchemy.util import ( # noqa: F401 21 | inspect_getfullargspec as inspect_getfullargspec, 22 | ) 23 | from sqlalchemy.util.compat import ( # noqa: F401 24 | inspect_formatargspec as inspect_formatargspec, 25 | ) 26 | 27 | is_posix = os.name == "posix" 28 | 29 | py314 = sys.version_info >= (3, 14) 30 | py313 = sys.version_info >= (3, 13) 31 | py312 = sys.version_info >= (3, 12) 32 | py311 = sys.version_info >= (3, 11) 33 | py310 = sys.version_info >= (3, 10) 34 | py39 = sys.version_info >= (3, 9) 35 | 36 | 37 | # produce a wrapper that allows encoded text to stream 38 | # into a given buffer, but doesn't close it. 39 | # not sure of a more idiomatic approach to this. 
40 | class EncodedIO(io.TextIOWrapper): 41 | def close(self) -> None: 42 | pass 43 | 44 | 45 | if py39: 46 | from importlib import resources as _resources 47 | 48 | importlib_resources = _resources 49 | from importlib import metadata as _metadata 50 | 51 | importlib_metadata = _metadata 52 | from importlib.metadata import EntryPoint as EntryPoint 53 | else: 54 | import importlib_resources # type:ignore # noqa 55 | import importlib_metadata # type:ignore # noqa 56 | from importlib_metadata import EntryPoint # type:ignore # noqa 57 | 58 | if py311: 59 | import tomllib as tomllib 60 | else: 61 | import tomli as tomllib # type: ignore # noqa 62 | 63 | 64 | if py312: 65 | 66 | def path_walk( 67 | path: Path, *, top_down: bool = True 68 | ) -> Iterator[tuple[Path, list[str], list[str]]]: 69 | return Path.walk(path) 70 | 71 | def path_relative_to( 72 | path: Path, other: Path, *, walk_up: bool = False 73 | ) -> Path: 74 | return path.relative_to(other, walk_up=walk_up) 75 | 76 | else: 77 | 78 | def path_walk( 79 | path: Path, *, top_down: bool = True 80 | ) -> Iterator[tuple[Path, list[str], list[str]]]: 81 | for root, dirs, files in os.walk(path, topdown=top_down): 82 | yield Path(root), dirs, files 83 | 84 | def path_relative_to( 85 | path: Path, other: Path, *, walk_up: bool = False 86 | ) -> Path: 87 | """ 88 | Calculate the relative path of 'path' with respect to 'other', 89 | optionally allowing 'path' to be outside the subtree of 'other'. 
90 | 91 | OK I used AI for this, sorry 92 | 93 | """ 94 | try: 95 | return path.relative_to(other) 96 | except ValueError: 97 | if walk_up: 98 | other_ancestors = list(other.parents) + [other] 99 | for ancestor in other_ancestors: 100 | try: 101 | return path.relative_to(ancestor) 102 | except ValueError: 103 | continue 104 | raise ValueError( 105 | f"{path} is not in the same subtree as {other}" 106 | ) 107 | else: 108 | raise 109 | 110 | 111 | def importlib_metadata_get(group: str) -> Sequence[EntryPoint]: 112 | ep = importlib_metadata.entry_points() 113 | if hasattr(ep, "select"): 114 | return ep.select(group=group) 115 | else: 116 | return ep.get(group, ()) # type: ignore 117 | 118 | 119 | def formatannotation_fwdref( 120 | annotation: Any, base_module: Optional[Any] = None 121 | ) -> str: 122 | """vendored from python 3.7""" 123 | # copied over _formatannotation from sqlalchemy 2.0 124 | 125 | if isinstance(annotation, str): 126 | return annotation 127 | 128 | if getattr(annotation, "__module__", None) == "typing": 129 | return repr(annotation).replace("typing.", "").replace("~", "") 130 | if isinstance(annotation, type): 131 | if annotation.__module__ in ("builtins", base_module): 132 | return repr(annotation.__qualname__) 133 | return annotation.__module__ + "." 
+ annotation.__qualname__ 134 | elif isinstance(annotation, typing.TypeVar): 135 | return repr(annotation).replace("~", "") 136 | return repr(annotation).replace("~", "") 137 | 138 | 139 | def read_config_parser( 140 | file_config: ConfigParser, 141 | file_argument: Sequence[Union[str, os.PathLike[str]]], 142 | ) -> List[str]: 143 | if py310: 144 | return file_config.read(file_argument, encoding="locale") 145 | else: 146 | return file_config.read(file_argument) 147 | -------------------------------------------------------------------------------- /alembic/util/editor.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | from os.path import exists 5 | from os.path import join 6 | from os.path import splitext 7 | from subprocess import check_call 8 | from typing import Dict 9 | from typing import List 10 | from typing import Mapping 11 | from typing import Optional 12 | 13 | from .compat import is_posix 14 | from .exc import CommandError 15 | 16 | 17 | def open_in_editor( 18 | filename: str, environ: Optional[Dict[str, str]] = None 19 | ) -> None: 20 | """ 21 | Opens the given file in a text editor. If the environment variable 22 | ``EDITOR`` is set, this is taken as preference. 23 | 24 | Otherwise, a list of commonly installed editors is tried. 25 | 26 | If no editor matches, an :py:exc:`OSError` is raised. 27 | 28 | :param filename: The filename to open. Will be passed verbatim to the 29 | editor command. 30 | :param environ: An optional drop-in replacement for ``os.environ``. Used 31 | mainly for testing. 
32 | """ 33 | env = os.environ if environ is None else environ 34 | try: 35 | editor = _find_editor(env) 36 | check_call([editor, filename]) 37 | except Exception as exc: 38 | raise CommandError("Error executing editor (%s)" % (exc,)) from exc 39 | 40 | 41 | def _find_editor(environ: Mapping[str, str]) -> str: 42 | candidates = _default_editors() 43 | for i, var in enumerate(("EDITOR", "VISUAL")): 44 | if var in environ: 45 | user_choice = environ[var] 46 | if exists(user_choice): 47 | return user_choice 48 | if os.sep not in user_choice: 49 | candidates.insert(i, user_choice) 50 | 51 | for candidate in candidates: 52 | path = _find_executable(candidate, environ) 53 | if path is not None: 54 | return path 55 | raise OSError( 56 | "No suitable editor found. Please set the " 57 | '"EDITOR" or "VISUAL" environment variables' 58 | ) 59 | 60 | 61 | def _find_executable( 62 | candidate: str, environ: Mapping[str, str] 63 | ) -> Optional[str]: 64 | # Assuming this is on the PATH, we need to determine it's absolute 65 | # location. Otherwise, ``check_call`` will fail 66 | if not is_posix and splitext(candidate)[1] != ".exe": 67 | candidate += ".exe" 68 | for path in environ.get("PATH", "").split(os.pathsep): 69 | value = join(path, candidate) 70 | if exists(value): 71 | return value 72 | return None 73 | 74 | 75 | def _default_editors() -> List[str]: 76 | # Look for an editor. 
Prefer the user's choice by env-var, fall back to 77 | # most commonly installed editor (nano/vim) 78 | if is_posix: 79 | return ["sensible-editor", "editor", "nano", "vim", "code"] 80 | else: 81 | return ["code.exe", "notepad++.exe", "notepad.exe"] 82 | -------------------------------------------------------------------------------- /alembic/util/exc.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any 4 | from typing import List 5 | from typing import Tuple 6 | from typing import TYPE_CHECKING 7 | 8 | if TYPE_CHECKING: 9 | from alembic.autogenerate import RevisionContext 10 | 11 | 12 | class CommandError(Exception): 13 | pass 14 | 15 | 16 | class AutogenerateDiffsDetected(CommandError): 17 | def __init__( 18 | self, 19 | message: str, 20 | revision_context: RevisionContext, 21 | diffs: List[Tuple[Any, ...]], 22 | ) -> None: 23 | super().__init__(message) 24 | self.revision_context = revision_context 25 | self.diffs = diffs 26 | -------------------------------------------------------------------------------- /alembic/util/messaging.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from collections.abc import Iterable 4 | from contextlib import contextmanager 5 | import logging 6 | import sys 7 | import textwrap 8 | from typing import Iterator 9 | from typing import Optional 10 | from typing import TextIO 11 | from typing import Union 12 | import warnings 13 | 14 | from sqlalchemy.engine import url 15 | 16 | log = logging.getLogger(__name__) 17 | 18 | # disable "no handler found" errors 19 | logging.getLogger("alembic").addHandler(logging.NullHandler()) 20 | 21 | 22 | try: 23 | import fcntl 24 | import termios 25 | import struct 26 | 27 | ioctl = fcntl.ioctl(0, termios.TIOCGWINSZ, struct.pack("HHHH", 0, 0, 0, 0)) 28 | _h, TERMWIDTH, _hp, _wp = struct.unpack("HHHH", ioctl) 29 | if 
TERMWIDTH <= 0: # can occur if running in emacs pseudo-tty 30 | TERMWIDTH = None 31 | except (ImportError, OSError): 32 | TERMWIDTH = None 33 | 34 | 35 | def write_outstream( 36 | stream: TextIO, *text: Union[str, bytes], quiet: bool = False 37 | ) -> None: 38 | if quiet: 39 | return 40 | encoding = getattr(stream, "encoding", "ascii") or "ascii" 41 | for t in text: 42 | if not isinstance(t, bytes): 43 | t = t.encode(encoding, "replace") 44 | t = t.decode(encoding) 45 | try: 46 | stream.write(t) 47 | except OSError: 48 | # suppress "broken pipe" errors. 49 | # no known way to handle this on Python 3 however 50 | # as the exception is "ignored" (noisily) in TextIOWrapper. 51 | break 52 | 53 | 54 | @contextmanager 55 | def status( 56 | status_msg: str, newline: bool = False, quiet: bool = False 57 | ) -> Iterator[None]: 58 | msg(status_msg + " ...", newline, flush=True, quiet=quiet) 59 | try: 60 | yield 61 | except: 62 | if not quiet: 63 | write_outstream(sys.stdout, " FAILED\n") 64 | raise 65 | else: 66 | if not quiet: 67 | write_outstream(sys.stdout, " done\n") 68 | 69 | 70 | def err(message: str, quiet: bool = False) -> None: 71 | log.error(message) 72 | msg(f"FAILED: {message}", quiet=quiet) 73 | sys.exit(-1) 74 | 75 | 76 | def obfuscate_url_pw(input_url: str) -> str: 77 | return url.make_url(input_url).render_as_string(hide_password=True) 78 | 79 | 80 | def warn(msg: str, stacklevel: int = 2) -> None: 81 | warnings.warn(msg, UserWarning, stacklevel=stacklevel) 82 | 83 | 84 | def warn_deprecated(msg: str, stacklevel: int = 2) -> None: 85 | warnings.warn(msg, DeprecationWarning, stacklevel=stacklevel) 86 | 87 | 88 | def msg( 89 | msg: str, newline: bool = True, flush: bool = False, quiet: bool = False 90 | ) -> None: 91 | if quiet: 92 | return 93 | if TERMWIDTH is None: 94 | write_outstream(sys.stdout, msg) 95 | if newline: 96 | write_outstream(sys.stdout, "\n") 97 | else: 98 | # left indent output lines 99 | indent = " " 100 | lines = textwrap.wrap( 101 | msg, 
102 | TERMWIDTH, 103 | initial_indent=indent, 104 | subsequent_indent=indent, 105 | ) 106 | if len(lines) > 1: 107 | for line in lines[0:-1]: 108 | write_outstream(sys.stdout, line, "\n") 109 | write_outstream(sys.stdout, lines[-1], ("\n" if newline else "")) 110 | if flush: 111 | sys.stdout.flush() 112 | 113 | 114 | def format_as_comma(value: Optional[Union[str, Iterable[str]]]) -> str: 115 | if value is None: 116 | return "" 117 | elif isinstance(value, str): 118 | return value 119 | elif isinstance(value, Iterable): 120 | return ", ".join(value) 121 | else: 122 | raise ValueError("Don't know how to comma-format %r" % value) 123 | -------------------------------------------------------------------------------- /alembic/util/pyfiles.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import atexit 4 | from contextlib import ExitStack 5 | import importlib 6 | import importlib.machinery 7 | import importlib.util 8 | import os 9 | import pathlib 10 | import re 11 | import tempfile 12 | from types import ModuleType 13 | from typing import Any 14 | from typing import Optional 15 | from typing import Union 16 | 17 | from mako import exceptions 18 | from mako.template import Template 19 | 20 | from . 
import compat 21 | from .exc import CommandError 22 | 23 | 24 | def template_to_file( 25 | template_file: Union[str, os.PathLike[str]], 26 | dest: Union[str, os.PathLike[str]], 27 | output_encoding: str, 28 | *, 29 | append: bool = False, 30 | **kw: Any, 31 | ) -> None: 32 | template = Template(filename=_preserving_path_as_str(template_file)) 33 | try: 34 | output = template.render_unicode(**kw).encode(output_encoding) 35 | except: 36 | with tempfile.NamedTemporaryFile(suffix=".txt", delete=False) as ntf: 37 | ntf.write( 38 | exceptions.text_error_template() 39 | .render_unicode() 40 | .encode(output_encoding) 41 | ) 42 | fname = ntf.name 43 | raise CommandError( 44 | "Template rendering failed; see %s for a " 45 | "template-oriented traceback." % fname 46 | ) 47 | else: 48 | with open(dest, "ab" if append else "wb") as f: 49 | f.write(output) 50 | 51 | 52 | def coerce_resource_to_filename(fname_or_resource: str) -> pathlib.Path: 53 | """Interpret a filename as either a filesystem location or as a package 54 | resource. 55 | 56 | Names that are non absolute paths and contain a colon 57 | are interpreted as resources and coerced to a file location. 
58 | 59 | """ 60 | # TODO: there seem to be zero tests for the package resource codepath 61 | if not os.path.isabs(fname_or_resource) and ":" in fname_or_resource: 62 | tokens = fname_or_resource.split(":") 63 | 64 | # from https://importlib-resources.readthedocs.io/en/latest/migration.html#pkg-resources-resource-filename # noqa E501 65 | 66 | file_manager = ExitStack() 67 | atexit.register(file_manager.close) 68 | 69 | ref = compat.importlib_resources.files(tokens[0]) 70 | for tok in tokens[1:]: 71 | ref = ref / tok 72 | fname_or_resource = file_manager.enter_context( # type: ignore[assignment] # noqa: E501 73 | compat.importlib_resources.as_file(ref) 74 | ) 75 | return pathlib.Path(fname_or_resource) 76 | 77 | 78 | def pyc_file_from_path( 79 | path: Union[str, os.PathLike[str]], 80 | ) -> Optional[pathlib.Path]: 81 | """Given a python source path, locate the .pyc.""" 82 | 83 | pathpath = pathlib.Path(path) 84 | candidate = pathlib.Path( 85 | importlib.util.cache_from_source(pathpath.as_posix()) 86 | ) 87 | if candidate.exists(): 88 | return candidate 89 | 90 | # even for pep3147, fall back to the old way of finding .pyc files, 91 | # to support sourceless operation 92 | ext = pathpath.suffix 93 | for ext in importlib.machinery.BYTECODE_SUFFIXES: 94 | if pathpath.with_suffix(ext).exists(): 95 | return pathpath.with_suffix(ext) 96 | else: 97 | return None 98 | 99 | 100 | def load_python_file( 101 | dir_: Union[str, os.PathLike[str]], filename: Union[str, os.PathLike[str]] 102 | ) -> ModuleType: 103 | """Load a file from the given path as a Python module.""" 104 | 105 | dir_ = pathlib.Path(dir_) 106 | filename_as_path = pathlib.Path(filename) 107 | filename = filename_as_path.name 108 | 109 | module_id = re.sub(r"\W", "_", filename) 110 | path = dir_ / filename 111 | ext = path.suffix 112 | if ext == ".py": 113 | if path.exists(): 114 | module = load_module_py(module_id, path) 115 | else: 116 | pyc_path = pyc_file_from_path(path) 117 | if pyc_path is None: 118 | 
raise ImportError("Can't find Python file %s" % path) 119 | else: 120 | module = load_module_py(module_id, pyc_path) 121 | elif ext in (".pyc", ".pyo"): 122 | module = load_module_py(module_id, path) 123 | else: 124 | assert False 125 | return module 126 | 127 | 128 | def load_module_py( 129 | module_id: str, path: Union[str, os.PathLike[str]] 130 | ) -> ModuleType: 131 | spec = importlib.util.spec_from_file_location(module_id, path) 132 | assert spec 133 | module = importlib.util.module_from_spec(spec) 134 | spec.loader.exec_module(module) # type: ignore 135 | return module 136 | 137 | 138 | def _preserving_path_as_str(path: Union[str, os.PathLike[str]]) -> str: 139 | """receive str/pathlike and return a string. 140 | 141 | Does not convert an incoming string path to a Path first, to help with 142 | unit tests that are doing string path round trips without OS-specific 143 | processing if not necessary. 144 | 145 | """ 146 | if isinstance(path, str): 147 | return path 148 | elif isinstance(path, pathlib.PurePath): 149 | return str(path) 150 | else: 151 | return str(pathlib.Path(path)) 152 | -------------------------------------------------------------------------------- /docs/build/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | AUTOBUILD = sphinx-autobuild --port 8080 --watch ../../alembic 8 | PAPER = 9 | BUILDDIR = output 10 | 11 | # Internal variables. 12 | PAPEROPT_a4 = -D latex_paper_size=a4 13 | PAPEROPT_letter = -D latex_paper_size=letter 14 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
# Build targets for the Sphinx documentation.  All targets funnel through
# $(SPHINXBUILD) with $(ALLSPHINXOPTS) (defined above) into $(BUILDDIR).

.PHONY: help clean html autobuild dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest

help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  autobuild  autobuild and run a webserver"
	@echo "  dist-html  same as html, but places files in /doc"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"

clean:
	-rm -rf $(BUILDDIR)/*

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

autobuild:
	$(AUTOBUILD) $(ALLSPHINXOPTS) $(BUILDDIR)/html

dist-html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	cp -R $(BUILDDIR)/html/* ../
	rm -fr $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in ../."

dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Alembic.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Alembic.qhc"

latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
	      "run these through (pdf)latex."

changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."
/* docs/build/_static/nature_override.css
   Loads the base "nature" Sphinx theme plus site-level customizations,
   then applies local layout overrides. */
@import url("nature.css");
@import url("site_custom_css.css");


/* Render versionadded / versionchanged / deprecated notes as soft
   yellow callout boxes. */
.versionadded, .versionchanged, .deprecated {
    background-color: #FFFFCC;
    border: 1px solid #FFFF66;
    margin-bottom: 10px;
    margin-top: 10px;
    padding: 7px;
}

/* Italicize the directive label inside those callouts. */
.versionadded > p > span, .versionchanged > p > span, .deprecated > p > span{
    font-style: italic;
}

/* Widen the sidebar and shift the body content over to match. */
div.documentwrapper div.bodywrapper { margin-left: 350px;}
div.document div.sphinxsidebar { width: 350px; }

/* Let sidebar sections scroll instead of overflowing. */
div.sphinxsidebarwrapper div {
    overflow: auto;
}


/* docs/build/_static/site_custom_css.css
   Extra breathing room for paragraphs inside simple lists. */
ul.simple p {
    margin-bottom: 1.15rem;
}
_alembic.command.toplevel: 2 | 3 | ========= 4 | Commands 5 | ========= 6 | 7 | .. note:: this section discusses the **internal API of Alembic** 8 | as regards its command invocation system. 9 | This section is only useful for developers who wish to extend the 10 | capabilities of Alembic. For documentation on using Alembic commands, 11 | please see :doc:`/tutorial`. 12 | 13 | Alembic commands are all represented by functions in the :ref:`alembic.command.toplevel` 14 | package. They all accept the same style of usage, being sent 15 | the :class:`.Config` object as the first argument. 16 | 17 | Commands can be run programmatically, by first constructing a :class:`.Config` 18 | object, as in:: 19 | 20 | from alembic.config import Config 21 | from alembic import command 22 | alembic_cfg = Config("/path/to/yourapp/alembic.ini") 23 | command.upgrade(alembic_cfg, "head") 24 | 25 | In many cases, and perhaps more often than not, an application will wish 26 | to call upon a series of Alembic commands and/or other features. It is 27 | usually a good idea to link multiple commands along a single connection 28 | and transaction, if feasible. This can be achieved using the 29 | :attr:`.Config.attributes` dictionary in order to share a connection:: 30 | 31 | with engine.begin() as connection: 32 | alembic_cfg.attributes['connection'] = connection 33 | command.upgrade(alembic_cfg, "head") 34 | 35 | This recipe requires that ``env.py`` consumes this connection argument; 36 | see the example in :ref:`connection_sharing` for details. 37 | 38 | To write small API functions that make direct use of database and script directory 39 | information, rather than just running one of the built-in commands, 40 | use the :class:`.ScriptDirectory` and :class:`.MigrationContext` 41 | classes directly. 42 | 43 | .. 
automodule:: alembic.command 44 | :members: 45 | -------------------------------------------------------------------------------- /docs/build/api/config.rst: -------------------------------------------------------------------------------- 1 | .. _alembic.config.toplevel: 2 | 3 | ============== 4 | Configuration 5 | ============== 6 | 7 | .. note:: this section discusses the **internal API of Alembic** as 8 | regards internal configuration constructs. 9 | This section is only useful for developers who wish to extend the 10 | capabilities of Alembic. For documentation on configuration of 11 | an Alembic environment, please see :doc:`/tutorial`. 12 | 13 | The :class:`.Config` object represents the configuration 14 | passed to the Alembic environment. From an API usage perspective, 15 | it is needed for the following use cases: 16 | 17 | * to create a :class:`.ScriptDirectory`, which allows you to work 18 | with the actual script files in a migration environment 19 | * to create an :class:`.EnvironmentContext`, which allows you to 20 | actually run the ``env.py`` module within the migration environment 21 | * to programmatically run any of the commands in the :ref:`alembic.command.toplevel` 22 | module. 23 | 24 | The :class:`.Config` is *not* needed for these cases: 25 | 26 | * to instantiate a :class:`.MigrationContext` directly - this object 27 | only needs a SQLAlchemy connection or dialect name. 28 | * to instantiate a :class:`.Operations` object - this object only 29 | needs a :class:`.MigrationContext`. 30 | 31 | .. automodule:: alembic.config 32 | :members: 33 | -------------------------------------------------------------------------------- /docs/build/api/ddl.rst: -------------------------------------------------------------------------------- 1 | .. _alembic.ddl.toplevel: 2 | 3 | ============= 4 | DDL Internals 5 | ============= 6 | 7 | These are some of the constructs used to generate migration 8 | instructions. 
The APIs here build off of the :class:`sqlalchemy.schema.DDLElement` 9 | and :ref:`sqlalchemy.ext.compiler_toplevel` systems. 10 | 11 | For programmatic usage of Alembic's migration directives, the easiest 12 | route is to use the higher level functions given by :ref:`alembic.operations.toplevel`. 13 | 14 | .. automodule:: alembic.ddl 15 | :members: 16 | :undoc-members: 17 | 18 | .. automodule:: alembic.ddl.base 19 | :members: 20 | :undoc-members: 21 | 22 | .. automodule:: alembic.ddl.impl 23 | :members: 24 | :undoc-members: 25 | 26 | MySQL 27 | ============= 28 | 29 | .. automodule:: alembic.ddl.mysql 30 | :members: 31 | :undoc-members: 32 | :show-inheritance: 33 | 34 | MS-SQL 35 | ============= 36 | 37 | .. automodule:: alembic.ddl.mssql 38 | :members: 39 | :undoc-members: 40 | :show-inheritance: 41 | 42 | Postgresql 43 | ============= 44 | 45 | .. automodule:: alembic.ddl.postgresql 46 | :members: 47 | :undoc-members: 48 | :show-inheritance: 49 | 50 | SQLite 51 | ============= 52 | 53 | .. automodule:: alembic.ddl.sqlite 54 | :members: 55 | :undoc-members: 56 | :show-inheritance: 57 | -------------------------------------------------------------------------------- /docs/build/api/index.rst: -------------------------------------------------------------------------------- 1 | .. _api: 2 | 3 | =========== 4 | API Details 5 | =========== 6 | 7 | Alembic's internal API has many public integration points that can be used 8 | to extend Alembic's functionality as well as to re-use its functionality 9 | in new ways. As the project has grown, more APIs are created and exposed 10 | for this purpose. 
11 | 12 | Direct use of the vast majority of API details discussed here is not needed 13 | for rudimentary use of Alembic; the only API that is used normally by end users is 14 | the methods provided by the :class:`.Operations` class, which is discussed 15 | outside of this subsection, and the parameters that can be passed to 16 | the :meth:`.EnvironmentContext.configure` method, used when configuring 17 | one's ``env.py`` environment. However, real-world applications will 18 | usually end up using more of the internal API, in particular being able 19 | to run commands programmatically, as discussed in the section :doc:`/api/commands`. 20 | 21 | .. toctree:: 22 | :maxdepth: 2 23 | 24 | overview 25 | runtime 26 | config 27 | commands 28 | operations 29 | autogenerate 30 | script 31 | ddl 32 | 33 | -------------------------------------------------------------------------------- /docs/build/api/operations.rst: -------------------------------------------------------------------------------- 1 | .. _alembic.operations.toplevel: 2 | 3 | ===================== 4 | Operation Directives 5 | ===================== 6 | 7 | .. note:: this section discusses the **internal API of Alembic** as regards 8 | the internal system of defining migration operation directives. 9 | This section is only useful for developers who wish to extend the 10 | capabilities of Alembic. For end-user guidance on Alembic migration 11 | operations, please see :ref:`ops`. 12 | 13 | Within migration scripts, actual database migration operations are handled 14 | via an instance of :class:`.Operations`. The :class:`.Operations` class 15 | lists out available migration operations that are linked to a 16 | :class:`.MigrationContext`, which communicates instructions originated 17 | by the :class:`.Operations` object into SQL that is sent to a database or SQL 18 | output stream. 
19 | 20 | Most methods on the :class:`.Operations` class are generated dynamically 21 | using a "plugin" system, described in the next section 22 | :ref:`operation_plugins`. Additionally, when Alembic migration scripts 23 | actually run, the methods on the current :class:`.Operations` object are 24 | proxied out to the ``alembic.op`` module, so that they are available 25 | using module-style access. 26 | 27 | For an overview of how to use an :class:`.Operations` object directly 28 | in programs, as well as for reference to the standard operation methods 29 | as well as "batch" methods, see :ref:`ops`. 30 | 31 | .. _operation_plugins: 32 | 33 | Operation Plugins 34 | ===================== 35 | 36 | The Operations object is extensible using a plugin system. This system 37 | allows one to add new ``op.`` methods at runtime. The 38 | steps to use this system are to first create a subclass of 39 | :class:`.MigrateOperation`, register it using the :meth:`.Operations.register_operation` 40 | class decorator, then build a default "implementation" function which is 41 | established using the :meth:`.Operations.implementation_for` decorator. 
42 | 43 | Below we illustrate a very simple operation ``CreateSequenceOp`` which 44 | will implement a new method ``op.create_sequence()`` for use in 45 | migration scripts:: 46 | 47 | from alembic.operations import Operations, MigrateOperation 48 | 49 | @Operations.register_operation("create_sequence") 50 | class CreateSequenceOp(MigrateOperation): 51 | """Create a SEQUENCE.""" 52 | 53 | def __init__(self, sequence_name, schema=None): 54 | self.sequence_name = sequence_name 55 | self.schema = schema 56 | 57 | @classmethod 58 | def create_sequence(cls, operations, sequence_name, **kw): 59 | """Issue a "CREATE SEQUENCE" instruction.""" 60 | 61 | op = CreateSequenceOp(sequence_name, **kw) 62 | return operations.invoke(op) 63 | 64 | def reverse(self): 65 | # only needed to support autogenerate 66 | return DropSequenceOp(self.sequence_name, schema=self.schema) 67 | 68 | @Operations.register_operation("drop_sequence") 69 | class DropSequenceOp(MigrateOperation): 70 | """Drop a SEQUENCE.""" 71 | 72 | def __init__(self, sequence_name, schema=None): 73 | self.sequence_name = sequence_name 74 | self.schema = schema 75 | 76 | @classmethod 77 | def drop_sequence(cls, operations, sequence_name, **kw): 78 | """Issue a "DROP SEQUENCE" instruction.""" 79 | 80 | op = DropSequenceOp(sequence_name, **kw) 81 | return operations.invoke(op) 82 | 83 | def reverse(self): 84 | # only needed to support autogenerate 85 | return CreateSequenceOp(self.sequence_name, schema=self.schema) 86 | 87 | Above, the ``CreateSequenceOp`` and ``DropSequenceOp`` classes represent 88 | new operations that will 89 | be available as ``op.create_sequence()`` and ``op.drop_sequence()``. 
90 | The reason the operations 91 | are represented as stateful classes is so that an operation and a specific 92 | set of arguments can be represented generically; the state can then correspond 93 | to different kinds of operations, such as invoking the instruction against 94 | a database, or autogenerating Python code for the operation into a 95 | script. 96 | 97 | In order to establish the migrate-script behavior of the new operations, 98 | we use the :meth:`.Operations.implementation_for` decorator:: 99 | 100 | @Operations.implementation_for(CreateSequenceOp) 101 | def create_sequence(operations, operation): 102 | if operation.schema is not None: 103 | name = "%s.%s" % (operation.schema, operation.sequence_name) 104 | else: 105 | name = operation.sequence_name 106 | operations.execute("CREATE SEQUENCE %s" % name) 107 | 108 | 109 | @Operations.implementation_for(DropSequenceOp) 110 | def drop_sequence(operations, operation): 111 | if operation.schema is not None: 112 | name = "%s.%s" % (operation.schema, operation.sequence_name) 113 | else: 114 | name = operation.sequence_name 115 | operations.execute("DROP SEQUENCE %s" % name) 116 | 117 | Above, we use the simplest possible technique of invoking our DDL, which 118 | is just to call :meth:`.Operations.execute` with literal SQL. If this is 119 | all a custom operation needs, then this is fine. However, options for 120 | more comprehensive support include building out a custom SQL construct, 121 | as documented at :ref:`sqlalchemy.ext.compiler_toplevel`. 
122 | 123 | With the above two steps, a migration script can now use new methods 124 | ``op.create_sequence()`` and ``op.drop_sequence()`` that will proxy to 125 | our object as a classmethod:: 126 | 127 | def upgrade(): 128 | op.create_sequence("my_sequence") 129 | 130 | def downgrade(): 131 | op.drop_sequence("my_sequence") 132 | 133 | The registration of new operations only needs to occur in time for the 134 | ``env.py`` script to invoke :meth:`.MigrationContext.run_migrations`; 135 | within the module level of the ``env.py`` script is sufficient. 136 | 137 | .. seealso:: 138 | 139 | :ref:`autogen_custom_ops` - how to add autogenerate support to 140 | custom operations. 141 | 142 | .. _operation_objects: 143 | .. _alembic.operations.ops.toplevel: 144 | 145 | Built-in Operation Objects 146 | ============================== 147 | 148 | The migration operations present on :class:`.Operations` are themselves 149 | delivered via operation objects that represent an operation and its 150 | arguments. All operations descend from the :class:`.MigrateOperation` 151 | class, and are registered with the :class:`.Operations` class using 152 | the :meth:`.Operations.register_operation` class decorator. The 153 | :class:`.MigrateOperation` objects also serve as the basis for how the 154 | autogenerate system renders new migration scripts. 155 | 156 | .. seealso:: 157 | 158 | :ref:`operation_plugins` 159 | 160 | :ref:`customizing_revision` 161 | 162 | The built-in operation objects are listed below. 163 | 164 | .. automodule:: alembic.operations.ops 165 | :members: 166 | -------------------------------------------------------------------------------- /docs/build/api/overview.rst: -------------------------------------------------------------------------------- 1 | ======== 2 | Overview 3 | ======== 4 | 5 | .. note:: this section is a technical overview of the 6 | **internal API of Alembic**. 
7 | This section is only useful for developers who wish to extend the 8 | capabilities of Alembic; for regular users, reading this section 9 | is **not necessary**. 10 | 11 | A visualization of the primary features of Alembic's internals is presented 12 | in the following figure. The module and class boxes do not list out 13 | all the operations provided by each unit; only a small set of representative 14 | elements intended to convey the primary purpose of each system. 15 | 16 | .. image:: api_overview.png 17 | 18 | The script runner for Alembic is present in the :ref:`alembic.config.toplevel` module. 19 | This module produces a :class:`.Config` object and passes it to the 20 | appropriate function in :ref:`alembic.command.toplevel`. Functions within 21 | :ref:`alembic.command.toplevel` will typically instantiate an 22 | :class:`.ScriptDirectory` instance, which represents the collection of 23 | version files, and an :class:`.EnvironmentContext`, which is a configurational 24 | facade passed to the environment's ``env.py`` script. 25 | 26 | The :class:`.EnvironmentContext` object is the primary object used within 27 | the ``env.py`` script, whose main purpose is that of a facade for creating and using 28 | a :class:`.MigrationContext` object, which is the actual migration engine 29 | that refers to a database implementation. The primary method called 30 | on this object within an ``env.py`` script is the 31 | :meth:`.EnvironmentContext.configure` method, which sets up the 32 | :class:`.MigrationContext` with database connectivity and behavioral 33 | configuration. It also supplies methods for transaction demarcation and 34 | migration running, but these methods ultimately call upon the 35 | :class:`.MigrationContext` that's been configured. 
36 | 37 | :class:`.MigrationContext` is the gateway to the database 38 | for other parts of the application, and produces a :class:`.DefaultImpl` 39 | object which does the actual database communication, and knows how to 40 | create the specific SQL text of the various DDL directives such as 41 | ALTER TABLE; :class:`.DefaultImpl` has subclasses that are per-database-backend. 42 | In "offline" mode (e.g. ``--sql``), the :class:`.MigrationContext` will 43 | produce SQL to a file output stream instead of a database. 44 | 45 | During an upgrade or downgrade operation, a specific series of migration 46 | scripts are invoked starting with the :class:`.MigrationContext` in conjunction 47 | with the :class:`.ScriptDirectory`; the actual scripts themselves make use 48 | of the :class:`.Operations` object, which provide the end-user interface to 49 | specific database operations. The :class:`.Operations` object is generated 50 | based on a series of "operation directive" objects that are user-extensible, 51 | and start out in the :ref:`alembic.operations.ops.toplevel` module. 52 | 53 | Another prominent feature of Alembic is the "autogenerate" feature, which 54 | produces new migration scripts that contain Python code. The autogenerate 55 | feature starts in :ref:`alembic.autogenerate.toplevel`, and is used exclusively 56 | by the :func:`.alembic.command.revision` command when the ``--autogenerate`` 57 | flag is passed. Autogenerate refers to the :class:`.MigrationContext` 58 | and :class:`.DefaultImpl` in order to access database connectivity and 59 | access per-backend rules for autogenerate comparisons. It also makes use 60 | of :ref:`alembic.operations.ops.toplevel` in order to represent the operations that 61 | it will render into scripts. 62 | 63 | -------------------------------------------------------------------------------- /docs/build/api/runtime.rst: -------------------------------------------------------------------------------- 1 | .. 
_alembic.runtime.environment.toplevel: 2 | 3 | ======================= 4 | Runtime Objects 5 | ======================= 6 | 7 | The "runtime" of Alembic involves the :class:`.EnvironmentContext` 8 | and :class:`.MigrationContext` objects. These are the objects that are 9 | in play once the ``env.py`` script is loaded up by a command and 10 | a migration operation proceeds. 11 | 12 | The Environment Context 13 | ======================= 14 | 15 | The :class:`.EnvironmentContext` class provides most of the 16 | API used within an ``env.py`` script. Within ``env.py``, 17 | the instantiated :class:`.EnvironmentContext` is made available 18 | via a special *proxy module* called ``alembic.context``. That is, 19 | you can import ``alembic.context`` like a regular Python module, 20 | and each name you call upon it is ultimately routed towards the 21 | current :class:`.EnvironmentContext` in use. 22 | 23 | In particular, the key method used within ``env.py`` is :meth:`.EnvironmentContext.configure`, 24 | which establishes all the details about how the database will be accessed. 25 | 26 | .. automodule:: alembic.runtime.environment 27 | :members: EnvironmentContext 28 | 29 | .. _alembic.runtime.migration.toplevel: 30 | 31 | The Migration Context 32 | ===================== 33 | 34 | The :class:`.MigrationContext` handles the actual work to be performed 35 | against a database backend as migration operations proceed. It is generally 36 | not exposed to the end-user, except when the 37 | :paramref:`~.EnvironmentContext.configure.on_version_apply` callback hook is used. 38 | 39 | .. automodule:: alembic.runtime.migration 40 | :members: MigrationContext 41 | -------------------------------------------------------------------------------- /docs/build/api/script.rst: -------------------------------------------------------------------------------- 1 | .. 
_alembic.script.toplevel: 2 | 3 | ================ 4 | Script Directory 5 | ================ 6 | 7 | The :class:`.ScriptDirectory` object provides programmatic access 8 | to the Alembic version files present in the filesystem. 9 | 10 | .. automodule:: alembic.script 11 | :members: 12 | 13 | Revision 14 | ======== 15 | 16 | The :class:`.RevisionMap` object serves as the basis for revision 17 | management, used exclusively by :class:`.ScriptDirectory`. 18 | 19 | .. automodule:: alembic.script.revision 20 | :members: 21 | 22 | Write Hooks 23 | =========== 24 | 25 | .. automodule:: alembic.script.write_hooks 26 | :members: 27 | -------------------------------------------------------------------------------- /docs/build/front.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Front Matter 3 | ============ 4 | 5 | Information about the Alembic project. 6 | 7 | Project Homepage 8 | ================ 9 | 10 | Alembic is hosted on GitHub at https://github.com/sqlalchemy/alembic under the SQLAlchemy organization. 11 | 12 | Releases and project status are available on Pypi at https://pypi.python.org/pypi/alembic. 13 | 14 | The most recent published version of this documentation should be at https://alembic.sqlalchemy.org. 15 | 16 | 17 | .. _installation: 18 | 19 | Installation 20 | ============ 21 | 22 | While Alembic can be installed system wide, it's more common that it's 23 | installed local to a `virtual environment 24 | `_ , as it also uses libraries 25 | such as SQLAlchemy and database drivers that are more appropriate for 26 | local installations. 27 | 28 | The documentation below is **only one kind of approach to installing Alembic 29 | for a project**; there are many such approaches. The documentation below is 30 | provided only for those users who otherwise have no specific project setup 31 | chosen. 
32 | 33 | To build a virtual environment for a specific project, a virtual environment 34 | can be created using the 35 | `Python venv library `_:: 36 | 37 | $ cd /path/to/your/project 38 | $ python -m venv .venv 39 | 40 | There is now a Python interpreter that you can access in 41 | ``/path/to/your/project/.venv/bin/python``, as well as the `pip 42 | `_ installer tool in 43 | ``/path/to/your/project/.venv/bin/pip``. 44 | 45 | Next,the ``activate`` command installed by venv can be used so that 46 | all binaries local to this new Python environment are in the local path:: 47 | 48 | $ source /path/to/your/project/.venv/bin/activate 49 | 50 | We now install Alembic as follows:: 51 | 52 | $ pip install alembic 53 | 54 | The install will add the ``alembic`` command to the virtual environment. All 55 | operations with Alembic in terms of this specific virtual environment will then 56 | proceed through the usage of this command, as in:: 57 | 58 | $ alembic init alembic 59 | 60 | Finally, assuming your project is itself installable, meaning it has a 61 | ``pyproject.toml`` file, and/or ``setup.py`` script, the local project can 62 | be made a part of the same local environment by installing it with ``pip``, 63 | optionally using "editable" mode:: 64 | 65 | $ pip install -e . 66 | 67 | 68 | 69 | Dependencies 70 | ------------ 71 | 72 | Alembic's install process will ensure that SQLAlchemy_ 73 | is installed, in addition to other dependencies. Alembic will work with 74 | SQLAlchemy as of version **1.4.0**. 75 | 76 | .. versionchanged:: 1.15.0 Support for SQLAlchemy older than 1.4.0 was dropped. 77 | 78 | Alembic supports Python versions **3.9 and above** 79 | 80 | .. versionchanged:: 1.15 Alembic now supports Python 3.9 and newer. 81 | 82 | .. _versioning_scheme: 83 | 84 | Versioning Scheme 85 | ----------------- 86 | 87 | Alembic's versioning scheme is based on that of 88 | `SQLAlchemy's versioning scheme `_. 
89 | In particular, it should be noted that while Alembic uses a three-number 90 | versioning scheme, it **does not use SemVer**. In SQLAlchemy and Alembic's 91 | scheme, **the middle digit is considered to be a "Significant Minor Release", 92 | which may include removal of previously deprecated APIs with some risk of 93 | non-backwards compatibility in a very small number of cases**. 94 | 95 | This means that version "1.8.0", "1.9.0", "1.10.0", "1.11.0", etc. are 96 | **Significant Minor Releases**, which will include new API features and may 97 | remove or modify existing ones. 98 | 99 | Therefore, when `pinning `_ 100 | Alembic releases, pin to the "major" and "minor" digits to avoid API changes. 101 | 102 | A true "Major" release such as a change to "2.0" would include complete 103 | redesigns/re-architectures of foundational features; currently no such series 104 | of changes are planned, although changes such as replacing the entire 105 | "autogenerate" scheme with a new approach would qualify for that level of 106 | change. 107 | 108 | 109 | 110 | Community 111 | ========= 112 | 113 | Alembic is developed by `Mike Bayer `_, and is 114 | part of the SQLAlchemy_ project. 115 | 116 | User issues, discussion of potential bugs and features are most easily 117 | discussed using `GitHub Discussions `_. 118 | 119 | .. _bugs: 120 | 121 | Bugs 122 | ==== 123 | 124 | Bugs and feature enhancements to Alembic should be reported on the `GitHub 125 | issue tracker 126 | `_. 127 | 128 | .. _SQLAlchemy: https://www.sqlalchemy.org 129 | -------------------------------------------------------------------------------- /docs/build/index.rst: -------------------------------------------------------------------------------- 1 | =================================== 2 | Welcome to Alembic's documentation! 3 | =================================== 4 | 5 | `Alembic `_ is a lightweight database migration tool for usage 6 | with the `SQLAlchemy `_ Database Toolkit for Python. 7 | 8 | .. 
toctree:: 9 | :maxdepth: 3 10 | 11 | front 12 | tutorial 13 | autogenerate 14 | offline 15 | naming 16 | batch 17 | branches 18 | ops 19 | cookbook 20 | api/index 21 | changelog 22 | 23 | Indices and tables 24 | ================== 25 | 26 | * :ref:`genindex` 27 | * :ref:`modindex` 28 | * :ref:`search` 29 | 30 | -------------------------------------------------------------------------------- /docs/build/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | set SPHINXBUILD=sphinx-build 6 | set BUILDDIR=build 7 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source 8 | if NOT "%PAPER%" == "" ( 9 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 10 | ) 11 | 12 | if "%1" == "" goto help 13 | 14 | if "%1" == "help" ( 15 | :help 16 | echo.Please use `make ^` where ^ is one of 17 | echo. html to make standalone HTML files 18 | echo. dirhtml to make HTML files named index.html in directories 19 | echo. pickle to make pickle files 20 | echo. json to make JSON files 21 | echo. htmlhelp to make HTML files and a HTML help project 22 | echo. qthelp to make HTML files and a qthelp project 23 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 24 | echo. changes to make an overview over all changed/added/deprecated items 25 | echo. linkcheck to check all external links for integrity 26 | echo. doctest to run all doctests embedded in the documentation if enabled 27 | goto end 28 | ) 29 | 30 | if "%1" == "clean" ( 31 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 32 | del /q /s %BUILDDIR%\* 33 | goto end 34 | ) 35 | 36 | if "%1" == "html" ( 37 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 38 | echo. 39 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 40 | goto end 41 | ) 42 | 43 | if "%1" == "dirhtml" ( 44 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 45 | echo. 46 | echo.Build finished. 
The HTML pages are in %BUILDDIR%/dirhtml. 47 | goto end 48 | ) 49 | 50 | if "%1" == "pickle" ( 51 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 52 | echo. 53 | echo.Build finished; now you can process the pickle files. 54 | goto end 55 | ) 56 | 57 | if "%1" == "json" ( 58 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 59 | echo. 60 | echo.Build finished; now you can process the JSON files. 61 | goto end 62 | ) 63 | 64 | if "%1" == "htmlhelp" ( 65 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 66 | echo. 67 | echo.Build finished; now you can run HTML Help Workshop with the ^ 68 | .hhp project file in %BUILDDIR%/htmlhelp. 69 | goto end 70 | ) 71 | 72 | if "%1" == "qthelp" ( 73 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 74 | echo. 75 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 76 | .qhcp project file in %BUILDDIR%/qthelp, like this: 77 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Alembic.qhcp 78 | echo.To view the help file: 79 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Alembic.ghc 80 | goto end 81 | ) 82 | 83 | if "%1" == "latex" ( 84 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 85 | echo. 86 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 87 | goto end 88 | ) 89 | 90 | if "%1" == "changes" ( 91 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 92 | echo. 93 | echo.The overview file is in %BUILDDIR%/changes. 94 | goto end 95 | ) 96 | 97 | if "%1" == "linkcheck" ( 98 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 99 | echo. 100 | echo.Link check complete; look for any errors in the above output ^ 101 | or in %BUILDDIR%/linkcheck/output.txt. 102 | goto end 103 | ) 104 | 105 | if "%1" == "doctest" ( 106 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 107 | echo. 108 | echo.Testing of doctests in the sources finished, look at the ^ 109 | results in %BUILDDIR%/doctest/output.txt. 
110 | goto end 111 | ) 112 | 113 | :end 114 | -------------------------------------------------------------------------------- /docs/build/offline.rst: -------------------------------------------------------------------------------- 1 | Generating SQL Scripts (a.k.a. "Offline Mode") 2 | ============================================== 3 | 4 | A major capability of Alembic is to generate migrations as SQL scripts, instead of running 5 | them against the database - this is also referred to as *offline mode*. 6 | This is a critical feature when working in large organizations 7 | where access to DDL is restricted, and SQL scripts must be handed off to DBAs. Alembic makes 8 | this easy via the ``--sql`` option passed to any ``upgrade`` or ``downgrade`` command. We 9 | can, for example, generate a script that revises up to rev ``ae1027a6acf``:: 10 | 11 | $ alembic upgrade ae1027a6acf --sql 12 | INFO [alembic.context] Context class PostgresqlContext. 13 | INFO [alembic.context] Will assume transactional DDL. 
14 | BEGIN; 15 | 16 | CREATE TABLE alembic_version ( 17 | version_num VARCHAR(32) NOT NULL 18 | ); 19 | 20 | INFO [alembic.context] Running upgrade None -> 1975ea83b712 21 | CREATE TABLE account ( 22 | id SERIAL NOT NULL, 23 | name VARCHAR(50) NOT NULL, 24 | description VARCHAR(200), 25 | PRIMARY KEY (id) 26 | ); 27 | 28 | INFO [alembic.context] Running upgrade 1975ea83b712 -> ae1027a6acf 29 | ALTER TABLE account ADD COLUMN last_transaction_date TIMESTAMP WITHOUT TIME ZONE; 30 | 31 | INSERT INTO alembic_version (version_num) VALUES ('ae1027a6acf'); 32 | 33 | COMMIT; 34 | 35 | 36 | While the logging configuration dumped to standard error, the actual script was dumped to standard output - 37 | so in the absence of further configuration (described later in this section), we'd at first be using output 38 | redirection to generate a script:: 39 | 40 | $ alembic upgrade ae1027a6acf --sql > migration.sql 41 | 42 | Getting the Start Version 43 | -------------------------- 44 | 45 | Notice that our migration script started at the base - this is the default when using offline 46 | mode, as no database connection is present and there's no ``alembic_version`` table to read from. 47 | 48 | One way to provide a starting version in offline mode is to provide a range to the command line. 49 | This is accomplished by providing the "version" in ``start:end`` syntax:: 50 | 51 | $ alembic upgrade 1975ea83b712:ae1027a6acf --sql > migration.sql 52 | 53 | The ``start:end`` syntax is only allowed in offline mode; in "online" mode, the ``alembic_version`` 54 | table is always used to get at the current version. 55 | 56 | It's also possible to have the ``env.py`` script retrieve the "last" version from 57 | the local environment, such as from a local file. 
A scheme like this would basically 58 | treat a local file in the same way ``alembic_version`` works:: 59 | 60 | if context.is_offline_mode(): 61 | version_file = os.path.join(os.path.dirname(config.config_file_name), "version.txt") 62 | if os.path.exists(version_file): 63 | current_version = open(version_file).read() 64 | else: 65 | current_version = None 66 | context.configure(dialect_name=engine.name, starting_rev=current_version) 67 | context.run_migrations() 68 | end_version = context.get_revision_argument() 69 | if end_version and end_version != current_version: 70 | open(version_file, 'w').write(end_version) 71 | 72 | Writing Migration Scripts to Support Script Generation 73 | ------------------------------------------------------ 74 | 75 | The challenge of SQL script generation is that the scripts we generate can't rely upon 76 | any client/server database access. This means a migration script that pulls some rows 77 | into memory via a ``SELECT`` statement will not work in ``--sql`` mode. It's also 78 | important that the Alembic directives, all of which are designed specifically to work 79 | in both "live execution" as well as "offline SQL generation" mode, are used. 80 | 81 | Customizing the Environment 82 | --------------------------- 83 | 84 | Users of the ``--sql`` option are encouraged to hack their ``env.py`` files to suit their 85 | needs. The ``env.py`` script as provided is broken into two sections: ``run_migrations_online()`` 86 | and ``run_migrations_offline()``. Which function is run is determined at the bottom of the 87 | script by reading :meth:`.EnvironmentContext.is_offline_mode`, which basically determines if the 88 | ``--sql`` flag was enabled. 89 | 90 | For example, a multiple database configuration may want to run through each 91 | database and set the output of the migrations to different named files - the :meth:`.EnvironmentContext.configure` 92 | function accepts a parameter ``output_buffer`` for this purpose. 
Below we illustrate 93 | this within the ``run_migrations_offline()`` function:: 94 | 95 | from alembic import context 96 | import myapp 97 | import sys 98 | 99 | db_1 = myapp.db_1 100 | db_2 = myapp.db_2 101 | 102 | def run_migrations_offline(): 103 | """Run migrations *without* a SQL connection.""" 104 | 105 | for name, engine, file_ in [ 106 | ("db1", db_1, "db1.sql"), 107 | ("db2", db_2, "db2.sql"), 108 | ]: 109 | context.configure( 110 | url=engine.url, 111 | transactional_ddl=False, 112 | output_buffer=open(file_, 'w')) 113 | context.execute("-- running migrations for '%s'" % name) 114 | context.run_migrations(name=name) 115 | sys.stderr.write("Wrote file '%s'" % file_) 116 | 117 | def run_migrations_online(): 118 | """Run migrations *with* a SQL connection.""" 119 | 120 | for name, engine in [ 121 | ("db1", db_1), 122 | ("db2", db_2), 123 | ]: 124 | connection = engine.connect() 125 | context.configure(connection=connection) 126 | try: 127 | context.run_migrations(name=name) 128 | session.commit() 129 | except: 130 | session.rollback() 131 | raise 132 | 133 | if context.is_offline_mode(): 134 | run_migrations_offline() 135 | else: 136 | run_migrations_online() 137 | 138 | -------------------------------------------------------------------------------- /docs/build/ops.rst: -------------------------------------------------------------------------------- 1 | .. _ops: 2 | 3 | =================== 4 | Operation Reference 5 | =================== 6 | 7 | This file provides documentation on Alembic migration directives. 8 | 9 | The directives here are used within user-defined migration files, 10 | within the ``upgrade()`` and ``downgrade()`` functions, as well as 11 | any functions further invoked by those. 12 | 13 | All directives exist as methods on a class called :class:`.Operations`. 
14 | When migration scripts are run, this object is made available 15 | to the script via the ``alembic.op`` datamember, which is 16 | a *proxy* to an actual instance of :class:`.Operations`. 17 | Currently, ``alembic.op`` is a real Python module, populated 18 | with individual proxies for each method on :class:`.Operations`, 19 | so symbols can be imported safely from the ``alembic.op`` namespace. 20 | 21 | The :class:`.Operations` system is also fully extensible. See 22 | :ref:`operation_plugins` for details on this. 23 | 24 | A key design philosophy to the :ref:`alembic.operations.toplevel` methods is that 25 | to the greatest degree possible, they internally generate the 26 | appropriate SQLAlchemy metadata, typically involving 27 | :class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.Constraint` 28 | objects. This so that migration instructions can be 29 | given in terms of just the string names and/or flags involved. 30 | The exceptions to this 31 | rule include the :meth:`~.Operations.add_column` and :meth:`~.Operations.create_table` 32 | directives, which require full :class:`~sqlalchemy.schema.Column` 33 | objects, though the table metadata is still generated here. 34 | 35 | The functions here all require that a :class:`.MigrationContext` has been 36 | configured within the ``env.py`` script first, which is typically 37 | via :meth:`.EnvironmentContext.configure`. Under normal 38 | circumstances they are called from an actual migration script, which 39 | itself would be invoked by the :meth:`.EnvironmentContext.run_migrations` 40 | method. 41 | 42 | .. module:: alembic.operations 43 | 44 | .. class:: AbstractOperations 45 | 46 | Base class for :class:`.Operations` and :class:`.BatchOperations`. 47 | 48 | See :class:`.Operations` for full list of members 49 | 50 | .. autoclass:: Operations 51 | :members: 52 | :inherited-members: 53 | 54 | .. 
autoclass:: BatchOperations 55 | :members: 56 | -------------------------------------------------------------------------------- /docs/build/requirements.txt: -------------------------------------------------------------------------------- 1 | git+https://github.com/sqlalchemyorg/changelog.git#egg=changelog 2 | git+https://github.com/sqlalchemyorg/sphinx-paramlinks.git#egg=sphinx-paramlinks 3 | git+https://github.com/sqlalchemy/sqlalchemy.git 4 | backports.zoneinfo;python_version<"3.9" 5 | # because there's a dependency in pyfiles.py 6 | Mako 7 | importlib-metadata;python_version<"3.9" 8 | importlib-resources;python_version<"3.9" 9 | sphinx_copybutton==0.5.1 10 | sphinx-book-theme 11 | 12 | 13 | -------------------------------------------------------------------------------- /docs/build/unreleased/README.txt: -------------------------------------------------------------------------------- 1 | Individual per-changelog files go here 2 | in .rst format, which are pulled in by 3 | changelog (version 0.4.0 or higher) to 4 | be rendered into the changelog_xx.rst file. 5 | At release time, the files here are removed and written 6 | directly into the changelog. 7 | 8 | Rationale is so that multiple changes being merged 9 | into gerrit don't produce conflicts. Note that 10 | gerrit does not support custom merge handlers unlike 11 | git itself. 12 | 13 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | build-backend = "setuptools.build_meta" 3 | requires = ["setuptools>=77.0.3"] 4 | 5 | 6 | [project] 7 | name = "alembic" 8 | description = "A database migration tool for SQLAlchemy." 
9 | authors = [{name = "Mike Bayer", email = "mike_mp@zzzcomputing.com"}] 10 | license = "MIT" 11 | license-files = ["LICENSE"] 12 | classifiers = [ 13 | "Development Status :: 5 - Production/Stable", 14 | "Intended Audience :: Developers", 15 | "Environment :: Console", 16 | "Operating System :: OS Independent", 17 | "Programming Language :: Python", 18 | "Programming Language :: Python :: 3", 19 | "Programming Language :: Python :: 3.9", 20 | "Programming Language :: Python :: 3.10", 21 | "Programming Language :: Python :: 3.11", 22 | "Programming Language :: Python :: 3.12", 23 | "Programming Language :: Python :: 3.13", 24 | "Programming Language :: Python :: Implementation :: CPython", 25 | "Programming Language :: Python :: Implementation :: PyPy", 26 | "Topic :: Database :: Front-Ends", 27 | ] 28 | requires-python = ">=3.9" 29 | dependencies = [ 30 | "SQLAlchemy>=1.4.0", 31 | "Mako", 32 | "typing-extensions>=4.12", 33 | "tomli;python_version<'3.11'", 34 | ] 35 | dynamic = ["version"] 36 | 37 | [project.readme] 38 | file = "README.rst" 39 | content-type = "text/x-rst" 40 | 41 | [project.urls] 42 | Homepage = "https://alembic.sqlalchemy.org" 43 | Documentation = "https://alembic.sqlalchemy.org/en/latest/" 44 | Changelog = "https://alembic.sqlalchemy.org/en/latest/changelog.html" 45 | Source = "https://github.com/sqlalchemy/alembic/" 46 | "Issue Tracker" = "https://github.com/sqlalchemy/alembic/issues/" 47 | 48 | [project.optional-dependencies] 49 | tz = ["tzdata"] 50 | 51 | [project.scripts] 52 | alembic = "alembic.config:main" 53 | 54 | [tool.setuptools] 55 | include-package-data = true 56 | zip-safe = false 57 | package-dir = {"" = "."} 58 | 59 | [tool.setuptools.package-data] 60 | "*" = ["*.pyi", "py.typed", "*.mako", "README"] 61 | 62 | 63 | [tool.setuptools.packages.find] 64 | include = ["alembic*"] 65 | exclude = [ 66 | "test*", 67 | "examples*", 68 | ] 69 | namespaces = true 70 | 71 | 72 | [tool.setuptools.dynamic] 73 | version = {attr = 
"alembic.__version__"} 74 | 75 | 76 | [tool.black] 77 | line-length = 79 78 | target-version = ['py39'] 79 | 80 | [tool.pytest.ini_options] 81 | addopts = "--tb native -v -r sfxX -p no:warnings -p no:logging --maxfail=100" 82 | python_files = "tests/test_*.py" 83 | markers = [ 84 | "backend: tests that should run on all backends; typically dialect-sensitive", 85 | "mypy: mypy integration / plugin tests (not used by Alembic currently)", 86 | ] 87 | 88 | 89 | 90 | [tool.mypy] 91 | 92 | exclude = [ 93 | 'alembic/template', 94 | 'alembic.testing.*', 95 | ] 96 | show_error_codes = true 97 | 98 | [[tool.mypy.overrides]] 99 | 100 | module = [ 101 | "alembic.*" 102 | ] 103 | 104 | warn_unused_ignores = true 105 | strict = true 106 | 107 | 108 | 109 | [[tool.mypy.overrides]] 110 | module = [ 111 | 'mako.*', 112 | 'sqlalchemy.testing.*' 113 | ] 114 | ignore_missing_imports = true 115 | 116 | 117 | -------------------------------------------------------------------------------- /reap_dbs.py: -------------------------------------------------------------------------------- 1 | """Drop Oracle, SQL Server databases that are left over from a 2 | multiprocessing test run. 3 | 4 | Currently the cx_Oracle driver seems to sometimes not release a 5 | TCP connection even if close() is called, which prevents the provisioning 6 | system from dropping a database in-process. 7 | 8 | For SQL Server, databases still remain in use after tests run and 9 | running a kill of all detected sessions does not seem to release the 10 | database in process. 
11 | 12 | """ 13 | 14 | import logging 15 | import sys 16 | 17 | from sqlalchemy.testing import provision 18 | 19 | 20 | logging.basicConfig() 21 | logging.getLogger(provision.__name__).setLevel(logging.INFO) 22 | 23 | if hasattr(provision, "reap_dbs"): 24 | provision.reap_dbs(sys.argv[1]) 25 | else: 26 | provision.reap_oracle_dbs(sys.argv[1]) 27 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | enable-extensions = G 3 | # E203 is due to https://github.com/PyCQA/pycodestyle/issues/373 4 | ignore = 5 | A003,A005 6 | D, 7 | E203,E305,E704,E711,E712,E721,E722,E741, 8 | N801,N802,N806, 9 | RST304,RST303,RST299,RST399, 10 | W503,W504 11 | exclude = .venv,.git,.tox,dist,doc,*egg,build 12 | filename = *.py,*.pyi 13 | import-order-style = google 14 | application-import-names = alembic,tests 15 | per-file-ignores = 16 | **/__init__.py:F401 17 | **/*.pyi:E302,E704,E266 18 | max-line-length = 79 19 | 20 | [sqla_testing] 21 | requirement_cls=tests.requirements:DefaultRequirements 22 | profile_file=tests/profiles.txt 23 | 24 | 25 | [db] 26 | default=sqlite:///:memory: 27 | sqlite=sqlite:///:memory: 28 | sqlite_file=sqlite:///querytest.db 29 | postgresql=postgresql://scott:tiger@127.0.0.1:5432/test 30 | psycopg=postgresql+psycopg://scott:tiger@127.0.0.1:5432/test 31 | mysql=mysql://scott:tiger@127.0.0.1:3306/test?charset=utf8mb4 32 | mariadb=mariadb://scott:tiger@127.0.0.1:3306/test?charset=utf8mb4 33 | mssql=mssql+pyodbc://scott:tiger^5HHH@mssql2017:1433/test?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes 34 | oracle=oracle://scott:tiger@127.0.0.1:1521 35 | oracle8=oracle://scott:tiger@127.0.0.1:1521/?use_ansi=0 36 | 37 | 38 | 39 | 40 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from 
setuptools import setup 2 | 3 | setup() 4 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zzzeek/alembic/f0d65770c14dcaa19ec2c9270999bed2ef99a311/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | pytest plugin script. 4 | 5 | This script is an extension to py.test which 6 | installs SQLAlchemy's testing plugin into the local environment. 7 | 8 | """ 9 | import os 10 | 11 | import pytest 12 | 13 | os.environ["SQLALCHEMY_WARN_20"] = "true" 14 | 15 | pytest.register_assert_rewrite("sqlalchemy.testing.assertions") 16 | 17 | 18 | # ideally, SQLAlchemy would allow us to just import bootstrap, 19 | # but for now we have to use its "load from a file" approach 20 | 21 | # use bootstrapping so that test plugins are loaded 22 | # without touching the main library before coverage starts 23 | bootstrap_file = os.path.join( 24 | os.path.dirname(__file__), 25 | "..", 26 | "alembic", 27 | "testing", 28 | "plugin", 29 | "bootstrap.py", 30 | ) 31 | 32 | 33 | with open(bootstrap_file) as f: 34 | code = compile(f.read(), "bootstrap.py", "exec") 35 | to_bootstrap = "pytest" 36 | exec(code, globals(), locals()) 37 | 38 | try: 39 | from sqlalchemy.testing import asyncio 40 | except ImportError: 41 | pass 42 | else: 43 | asyncio.ENABLE_ASYNCIO = False 44 | 45 | from sqlalchemy.testing.plugin.pytestplugin import * # noqa 46 | 47 | wrap_pytest_sessionstart = pytest_sessionstart # noqa 48 | 49 | def pytest_sessionstart(session): 50 | wrap_pytest_sessionstart(session) 51 | from alembic.testing import warnings 52 | 53 | warnings.setup_filters() 54 | -------------------------------------------------------------------------------- 
/tests/test_editor.py: -------------------------------------------------------------------------------- 1 | import os 2 | from os.path import join 3 | from unittest.mock import patch 4 | 5 | from alembic import util 6 | from alembic.testing import combinations 7 | from alembic.testing import expect_raises_message 8 | from alembic.testing.fixtures import TestBase 9 | 10 | 11 | class TestHelpers(TestBase): 12 | def common(self, cb, is_posix=True): 13 | with ( 14 | patch("alembic.util.editor.check_call") as check_call, 15 | patch("alembic.util.editor.exists") as exists, 16 | patch( 17 | "alembic.util.editor.is_posix", 18 | new=is_posix, 19 | ), 20 | patch("os.pathsep", new=":" if is_posix else ";"), 21 | ): 22 | cb(check_call, exists) 23 | 24 | @combinations((True,), (False,)) 25 | def test_edit_with_user_editor(self, posix): 26 | def go(check_call, exists): 27 | test_environ = {"EDITOR": "myvim", "PATH": "/usr/bin"} 28 | executable = join("/usr/bin", "myvim") 29 | if not posix: 30 | executable += ".exe" 31 | 32 | exists.side_effect = lambda fname: fname == executable 33 | util.open_in_editor("myfile", test_environ) 34 | check_call.assert_called_with([executable, "myfile"]) 35 | 36 | self.common(go, posix) 37 | 38 | @combinations(("EDITOR",), ("VISUAL",)) 39 | def test_edit_with_user_editor_exists(self, key): 40 | def go(check_call, exists): 41 | test_environ = {key: "myvim", "PATH": "/usr/bin"} 42 | exists.side_effect = lambda fname: fname == "myvim" 43 | util.open_in_editor("myfile", test_environ) 44 | check_call.assert_called_with(["myvim", "myfile"]) 45 | 46 | self.common(go) 47 | 48 | @combinations((True,), (False,)) 49 | def test_edit_with_user_editor_precedence(self, with_path): 50 | def go(check_call, exists): 51 | test_environ = { 52 | "EDITOR": "myvim", 53 | "VISUAL": "myvisual", 54 | "PATH": "/usr/bin", 55 | } 56 | exes = ["myvim", "myvisual"] 57 | if with_path: 58 | exes = [join("/usr/bin", n) for n in exes] 59 | exists.side_effect = lambda fname: fname in 
exes 60 | util.open_in_editor("myfile", test_environ) 61 | check_call.assert_called_with([exes[0], "myfile"]) 62 | 63 | self.common(go) 64 | 65 | def test_edit_with_user_editor_abs(self): 66 | def go(check_call, exists): 67 | test_environ = {"EDITOR": "/foo/myvim", "PATH": "/usr/bin"} 68 | exists.side_effect = lambda fname: fname == "/usr/bin/foo/myvim" 69 | with expect_raises_message(util.CommandError, "EDITOR"): 70 | util.open_in_editor("myfile", test_environ) 71 | 72 | self.common(go) 73 | 74 | def test_edit_with_default_editor(self): 75 | def go(check_call, exists): 76 | test_environ = {"PATH": os.pathsep.join(["/usr/bin", "/bin"])} 77 | executable = join("/bin", "vim") 78 | 79 | exists.side_effect = lambda fname: fname == executable 80 | util.open_in_editor("myfile", test_environ) 81 | check_call.assert_called_with([executable, "myfile"]) 82 | 83 | self.common(go) 84 | 85 | def test_edit_with_default_editor_windows(self): 86 | def go(check_call, exists): 87 | test_environ = { 88 | "PATH": os.pathsep.join( 89 | [r"C:\Windows\System32", r"C:\Users\user\bin"] 90 | ) 91 | } 92 | executable = join(r"C:\Users\user\bin", "notepad.exe") 93 | 94 | exists.side_effect = lambda fname: fname == executable 95 | util.open_in_editor("myfile", test_environ) 96 | check_call.assert_called_with([executable, "myfile"]) 97 | 98 | self.common(go, False) 99 | 100 | def test_edit_with_missing_editor(self): 101 | def go(check_call, exists): 102 | test_environ = {} 103 | exists.return_value = False 104 | with expect_raises_message(util.CommandError, "EDITOR"): 105 | util.open_in_editor("myfile", test_environ) 106 | 107 | self.common(go) 108 | -------------------------------------------------------------------------------- /tests/test_environment.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | from alembic import command 5 | from alembic import testing 6 | from alembic import util 7 | from alembic.environment 
import EnvironmentContext 8 | from alembic.migration import MigrationContext 9 | from alembic.script import ScriptDirectory 10 | from alembic.testing import config 11 | from alembic.testing import eq_ 12 | from alembic.testing import is_ 13 | from alembic.testing import mock 14 | from alembic.testing.assertions import expect_raises_message 15 | from alembic.testing.env import _get_staging_directory 16 | from alembic.testing.env import _no_sql_testing_config 17 | from alembic.testing.env import _sqlite_file_db 18 | from alembic.testing.env import _sqlite_testing_config 19 | from alembic.testing.env import clear_staging_env 20 | from alembic.testing.env import staging_env 21 | from alembic.testing.env import write_script 22 | from alembic.testing.fixtures import capture_context_buffer 23 | from alembic.testing.fixtures import TestBase 24 | 25 | 26 | class EnvironmentTest(TestBase): 27 | def setUp(self): 28 | staging_env() 29 | self.cfg = _no_sql_testing_config() 30 | 31 | def tearDown(self): 32 | clear_staging_env() 33 | 34 | def _fixture(self, **kw): 35 | script = ScriptDirectory.from_config(self.cfg) 36 | env = EnvironmentContext(self.cfg, script, **kw) 37 | return env 38 | 39 | def test_x_arg(self): 40 | env = self._fixture() 41 | self.cfg.cmd_opts = mock.Mock(x="y=5") 42 | eq_(env.get_x_argument(), "y=5") 43 | 44 | def test_x_arg_asdict(self): 45 | env = self._fixture() 46 | self.cfg.cmd_opts = mock.Mock(x=["y=5"]) 47 | eq_(env.get_x_argument(as_dictionary=True), {"y": "5"}) 48 | 49 | def test_x_arg_no_opts(self): 50 | env = self._fixture() 51 | eq_(env.get_x_argument(), []) 52 | 53 | def test_x_arg_no_opts_asdict(self): 54 | env = self._fixture() 55 | eq_(env.get_x_argument(as_dictionary=True), {}) 56 | 57 | def test_x_arg_empty_value(self): 58 | env = self._fixture() 59 | self.cfg.cmd_opts = mock.Mock(x=["y"]) 60 | eq_(env.get_x_argument(as_dictionary=True), {"y": ""}) 61 | 62 | def test_tag_arg(self): 63 | env = self._fixture(tag="x") 64 | 
        # (continuation of a test method whose `def` line precedes this chunk)
        eq_(env.get_tag_argument(), "x")

    def test_migration_context_has_config(self):
        """A context created via configure() carries the Config; a bare
        MigrationContext constructed directly has config=None."""
        env = self._fixture()
        env.configure(url="sqlite://")
        ctx = env._migration_context
        is_(ctx.config, self.cfg)

        ctx = MigrationContext(ctx.dialect, None, {})
        is_(ctx.config, None)

    def test_sql_mode_parameters(self):
        """Literal percent signs in migration SQL must pass through
        unescaped when running in --sql (offline) mode."""
        env = self._fixture()

        a_rev = "arev"
        env.script.generate_revision(a_rev, "revision a", refresh=True)
        write_script(
            env.script,
            a_rev,
            # NOTE(review): inner indentation of this script body was
            # reconstructed; the dump collapsed whitespace — confirm against
            # the original file.
            """\
"Rev A"
revision = '{}'
down_revision = None

from alembic import op

def upgrade():
    op.execute('''
        do some SQL thing with a % percent sign %
    ''')

""".format(
                a_rev
            ),
        )
        with capture_context_buffer(transactional_ddl=True) as buf:
            command.upgrade(self.cfg, "arev", sql=True)
        assert "do some SQL thing with a % percent sign %" in buf.getvalue()

    @config.requirements.legacy_engine
    @testing.uses_deprecated(
        r"The Engine.execute\(\) function/method is considered legacy"
    )
    def test_error_on_passing_engine(self):
        """Passing an Engine (instead of a Connection) to configure()
        raises CommandError with a descriptive message."""
        env = self._fixture()

        engine = _sqlite_file_db()

        a_rev = "arev"
        env.script.generate_revision(a_rev, "revision a", refresh=True)
        write_script(
            env.script,
            a_rev,
            """\
"Rev A"
revision = '%s'
down_revision = None

from alembic import op


def upgrade():
    pass


def downgrade():
    pass

"""
            % a_rev,
        )
        migration_fn = mock.MagicMock()

        def upgrade(rev, context):
            migration_fn(rev, context)
            return env.script._upgrade_revs(a_rev, rev)

        with expect_raises_message(
            util.CommandError,
            r"'connection' argument to configure\(\) is "
            r"expected to be a sqlalchemy.engine.Connection ",
        ):
            env.configure(
                connection=engine, fn=upgrade, transactional_ddl=False
            )


class CWDTest(TestBase):
    """Tests for the prepend_sys_path / path_separator config options,
    which control how entries are prepended to sys.path when the
    environment script runs."""

    def setUp(self):
        self.env = staging_env()
        self.cfg = _sqlite_testing_config()

    def tearDown(self):
        clear_staging_env()

    # combinations: (config_value, path_separator option or None, expected
    # sys.path prefix). None exercises the legacy auto-detected separators
    # (colon, space, comma) and the associated deprecation warning.
    @testing.combinations(
        (
            ".",
            None,
            ["."],
        ),
        ("/tmp/foo:/tmp/bar", None, ["/tmp/foo", "/tmp/bar"]),
        ("/tmp/foo:/tmp/bar", ":", ["/tmp/foo", "/tmp/bar"]),
        ("/tmp/foo /tmp/bar", None, ["/tmp/foo", "/tmp/bar"]),
        ("/tmp/foo,/tmp/bar", None, ["/tmp/foo", "/tmp/bar"]),
        (". /tmp/foo", None, [".", "/tmp/foo"]),
        (". /tmp/foo", "space", [".", "/tmp/foo"]),
    )
    def test_sys_path_prepend(self, config_value, path_separator, expected):
        if path_separator is not None:
            self.cfg.set_main_option("path_separator", path_separator)
        self.cfg.set_main_option("prepend_sys_path", config_value)

        if path_separator is None:
            # omitting path_separator is deprecated and must warn
            with testing.expect_deprecated(
                "No path_separator found in configuration;"
            ):
                script = ScriptDirectory.from_config(self.cfg)
        else:
            script = ScriptDirectory.from_config(self.cfg)
        env = EnvironmentContext(self.cfg, script)

        target = os.path.abspath(_get_staging_directory())

        def assert_(heads, context):
            # invoked as the migration fn; checks the sys.path prefix
            eq_(
                [os.path.abspath(p) for p in sys.path[0 : len(expected)]],
                [os.path.abspath(p) for p in expected],
            )
            return []

        # remove the staging dir from sys.path so the prepend is observable
        p = [p for p in sys.path if os.path.abspath(p) != target]
        with mock.patch.object(sys, "path", p):
            env.configure(url="sqlite://", fn=assert_)
            with env:
                script.run_env()

-------------------------------------------------------------------------------- /tests/test_external_dialect.py: --------------------------------------------------------------------------------

from sqlalchemy import MetaData
from sqlalchemy import types as sqla_types
from sqlalchemy.engine import default

from alembic import autogenerate
from alembic.autogenerate import api
from alembic.autogenerate import render
from alembic.ddl import impl
from alembic.migration import MigrationContext
from alembic.testing import eq_
from alembic.testing import eq_ignore_whitespace
from alembic.testing.fixtures import TestBase


class CustomDialect(default.DefaultDialect):
    """Minimal third-party dialect used to exercise external-dialect
    autogenerate rendering."""

    name = "custom_dialect"


# register the dialect so MigrationContext.configure(dialect_name=...) can
# find it; registry may be unavailable on very old SQLAlchemy versions
try:
    from sqlalchemy.dialects import registry
except ImportError:
    pass
else:
    registry.register("custom_dialect", __name__, "CustomDialect")


class CustomDialectImpl(impl.DefaultImpl):
    """Alembic impl for the custom dialect; renders types defined in this
    module with a `custom_dialect_types.` prefix plus the needed import."""

    __dialect__ = "custom_dialect"
    transactional_ddl = False

    def render_type(self, type_, autogen_context):
        # types defined in this module are "external"; record the import
        # that the generated migration will need
        if type_.__module__ == __name__:
            autogen_context.imports.add(
                "from %s import custom_dialect_types" % (__name__,)
            )
            is_external = True
        else:
            is_external = False

        # dispatch to a per-visit-name renderer if one is defined
        if is_external and hasattr(
            self, "_render_%s_type" % type_.__visit_name__
        ):
            meth = getattr(self, "_render_%s_type" % type_.__visit_name__)
            return meth(type_, autogen_context)

        if is_external:
            return "%s.%r" % ("custom_dialect_types", type_)
        else:
            # fall through to default rendering for non-external types
            return None

    def _render_EXT_ARRAY_type(self, type_, autogen_context):
        # renders EXT_ARRAY(<item_type>), recursing into item_type so
        # nested SQLA / external types render correctly
        return render._render_type_w_subtype(
            type_,
            autogen_context,
            "item_type",
            r"(.+?\()",
            prefix="custom_dialect_types.",
        )


class EXT_ARRAY(sqla_types.TypeEngine):
    """External array type with a nested item_type, mirroring e.g.
    postgresql.ARRAY."""

    __visit_name__ = "EXT_ARRAY"

    def __init__(self, item_type):
        # accept a type class as a convenience and instantiate it
        if isinstance(item_type, type):
            item_type = item_type()
        self.item_type = item_type
        super().__init__()


class FOOBARTYPE(sqla_types.TypeEngine):
    """Plain external type with no nesting."""

    __visit_name__ = "FOOBARTYPE"


class ExternalDialectRenderTest(TestBase):
    """Autogenerate rendering tests run against the custom external
    dialect registered above."""

    def setUp(self):
        ctx_opts = {
            "sqlalchemy_module_prefix": "sa.",
            "alembic_module_prefix": "op.",
            "target_metadata": MetaData(),
            "user_module_prefix": None,
        }
        context = MigrationContext.configure(
            dialect_name="custom_dialect", opts=ctx_opts
        )

        self.autogen_context = api.AutogenContext(context)

    def test_render_type(self):
        """External type renders with prefix and records its import."""
        eq_ignore_whitespace(
            autogenerate.render._repr_type(FOOBARTYPE(), self.autogen_context),
            "custom_dialect_types.FOOBARTYPE()",
        )

        eq_(
            self.autogen_context.imports,
            {
                "from tests.test_external_dialect "
                "import custom_dialect_types"
            },
        )

    def test_external_nested_render_sqla_type(self):
        """External array type nests plain SQLAlchemy types with the
        `sa.` prefix, including type arguments."""
        eq_ignore_whitespace(
            autogenerate.render._repr_type(
                EXT_ARRAY(sqla_types.Integer), self.autogen_context
            ),
            "custom_dialect_types.EXT_ARRAY(sa.Integer())",
        )

        eq_ignore_whitespace(
            autogenerate.render._repr_type(
                EXT_ARRAY(sqla_types.DateTime(timezone=True)),
                self.autogen_context,
            ),
            "custom_dialect_types.EXT_ARRAY(sa.DateTime(timezone=True))",
        )

        eq_(
            self.autogen_context.imports,
            {
                "from tests.test_external_dialect "
                "import custom_dialect_types"
            },
        )

    def test_external_nested_render_external_type(self):
        """External array type nests another external type; both carry
        the custom prefix."""
        eq_ignore_whitespace(
            autogenerate.render._repr_type(
                EXT_ARRAY(FOOBARTYPE), self.autogen_context
            ),
            "custom_dialect_types.EXT_ARRAY"
            "(custom_dialect_types.FOOBARTYPE())",
        )

        eq_(
            self.autogen_context.imports,
            {
                "from tests.test_external_dialect "
                "import custom_dialect_types"
            },
        )

-------------------------------------------------------------------------------- /tests/test_impl.py: --------------------------------------------------------------------------------

from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import Table
from sqlalchemy.sql import text

from alembic import testing
from alembic.testing import eq_
from alembic.testing.fixtures import FutureEngineMixin
from alembic.testing.fixtures import TablesTest


class ImplTest(TablesTest):
    """Tests for DefaultImpl._exec parameter handling (params vs.
    multiparams, and their rejection in --sql mode)."""

    __only_on__ = "sqlite"

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "some_table", metadata, Column("x", Integer), Column("y", Integer)
        )

    @testing.fixture
    def impl(self, migration_context):
        # impl bound to a live connection, inside a per-migration transaction
        with migration_context.begin_transaction(_per_migration=True):
            yield migration_context.impl

    @testing.fixture
    def as_sql_impl(self, as_sql_migration_context):
        # impl in offline (--sql) mode, where bound parameters are illegal
        with as_sql_migration_context.begin_transaction(_per_migration=True):
            yield as_sql_migration_context.impl

    def test_execute_params(self, impl):
        result = impl._exec(text("select :my_param"), params={"my_param": 5})
        eq_(result.scalar(), 5)

    def test_execute_multiparams(self, impl):
        some_table = self.tables.some_table
        impl._exec(
            some_table.insert(),
            multiparams=[{"x": 1, "y": 2}, {"x": 2, "y": 3}, {"x": 5, "y": 7}],
        )
        eq_(
            impl._exec(
                some_table.select().order_by(some_table.c.x)
            ).fetchall(),
            [(1, 2), (2, 3), (5, 7)],
        )

    def test_dont_send_both(self, impl):
        # params and multiparams are mutually exclusive
        with testing.expect_raises_message(
            TypeError, "Can't send params and multiparams at the same time"
        ):
            impl._exec(
                text("select :my_param"),
                params={"my_param": 5},
                multiparams=[],
            )

    def test_no_params_w_as_sql(self, as_sql_impl):
        with testing.expect_raises_message(
            TypeError, "SQL parameters not allowed with as_sql"
        ):
            as_sql_impl._exec(text("select :my_param"), params={"my_param": 5})

    def test_no_multiparams_w_as_sql(self, as_sql_impl):
        with testing.expect_raises_message(
            TypeError, "SQL parameters not allowed with as_sql"
        ):
            as_sql_impl._exec(text("select :my_param"), multiparams=[])


class FutureImplTest(FutureEngineMixin, ImplTest):
    """Same suite run against the SQLAlchemy 2.0-style ("future") engine."""

    pass

-------------------------------------------------------------------------------- /tests/test_messaging.py: --------------------------------------------------------------------------------

from io import StringIO

from alembic.testing import eq_
from alembic.testing import mock
from alembic.testing.fixtures import TestBase
from alembic.util.messaging import msg
from alembic.util.messaging import obfuscate_url_pw


class MessagingTest(TestBase):
    """Tests for console-message helpers in alembic.util.messaging."""

    def test_msg_wraps(self):
        """msg() wraps output at TERMWIDTH with a two-space indent.

        NOTE(review): the two-space indent inside the expected strings was
        reconstructed (the dump collapsed whitespace); it matches the
        "10 chars before wrapping" comments at TERMWIDTH=10 — confirm
        against the original file.
        """
        buf = StringIO()
        with (
            mock.patch("sys.stdout", buf),
            mock.patch("alembic.util.messaging.TERMWIDTH", 10),
        ):
            msg("AAAAAAAAAAAAAAAAA")
        eq_(
            str(buf.getvalue()).splitlines(),
            [
                "  AAAAAAAA",  # initial indent 10 chars before wrapping
                "  AAAAAAAA",  # subsequent indent 10 chars before wrapping
                "  A",  # subsequent indent with remaining chars
            ],
        )

    def test_current_obfuscate_password(self):
        """Password portion of a URL is replaced with ***."""
        eq_(
            obfuscate_url_pw("postgresql://scott:tiger@localhost/test"),
            "postgresql://scott:***@localhost/test",
        )

-------------------------------------------------------------------------------- /tests/test_op_naming_convention.py: --------------------------------------------------------------------------------

from sqlalchemy import Boolean
from sqlalchemy import CheckConstraint
from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import Table
from sqlalchemy.sql import column
from sqlalchemy.sql import func

from alembic import op
from alembic.testing.fixtures import op_fixture
from alembic.testing.fixtures import TestBase


class AutoNamingConventionTest(TestBase):
    """Tests that op.* directives apply the target MetaData's
    naming_convention, and that op.f() opts a name out of conventions."""

    def test_add_check_constraint(self):
        context = op_fixture(
            naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}
        )
        op.create_check_constraint(
            "foo", "user_table", func.len(column("name")) > 5
        )
        context.assert_(
            "ALTER TABLE user_table ADD CONSTRAINT ck_user_table_foo "
            "CHECK (len(name) > 5)"
        )

    def test_add_check_constraint_name_is_none(self):
        # a convention with no %(constraint_name)s token works with name=None
        context = op_fixture(naming_convention={"ck": "ck_%(table_name)s_foo"})
        op.create_check_constraint(
            None, "user_table", func.len(column("name")) > 5
        )
        context.assert_(
            "ALTER TABLE user_table ADD CONSTRAINT ck_user_table_foo "
            "CHECK (len(name) > 5)"
        )

    def test_add_unique_constraint_name_is_none(self):
        context = op_fixture(naming_convention={"uq": "uq_%(table_name)s_foo"})
        op.create_unique_constraint(None, "user_table", "x")
        context.assert_(
            "ALTER TABLE user_table "
            "ADD CONSTRAINT uq_user_table_foo UNIQUE (x)"
        )

    def test_add_index_name_is_none(self):
        context = op_fixture(naming_convention={"ix": "ix_%(table_name)s_foo"})
        op.create_index(None, "user_table", "x")
        context.assert_("CREATE INDEX ix_user_table_foo ON user_table (x)")

    def test_add_check_constraint_already_named_from_schema(self):
        # a constraint already named by its source MetaData's convention
        # keeps that name (ck_t_cc1) even when reused on another table
        m1 = MetaData(
            naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}
        )
        ck = CheckConstraint("im a constraint", name="cc1")
        Table("t", m1, Column("x"), ck)

        context = op_fixture(
            naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}
        )

        op.create_table("some_table", Column("x", Integer, ck))
        context.assert_(
            "CREATE TABLE some_table "
            "(x INTEGER CONSTRAINT ck_t_cc1 CHECK (im a constraint))"
        )

    def test_add_check_constraint_inline_on_table(self):
        context = op_fixture(
            naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}
        )
        op.create_table(
            "some_table",
            Column("x", Integer),
            CheckConstraint("im a constraint", name="cc1"),
        )
        context.assert_(
            "CREATE TABLE some_table "
            "(x INTEGER, CONSTRAINT ck_some_table_cc1 CHECK (im a constraint))"
        )

    def test_add_check_constraint_inline_on_table_w_f(self):
        # op.f() marks the name as final; the convention is not re-applied
        context = op_fixture(
            naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}
        )
        op.create_table(
            "some_table",
            Column("x", Integer),
            CheckConstraint("im a constraint", name=op.f("ck_some_table_cc1")),
        )
        context.assert_(
            "CREATE TABLE some_table "
            "(x INTEGER, CONSTRAINT ck_some_table_cc1 CHECK (im a constraint))"
        )

    def test_add_check_constraint_inline_on_column(self):
        context = op_fixture(
            naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}
        )
        op.create_table(
            "some_table",
            Column(
                "x", Integer, CheckConstraint("im a constraint", name="cc1")
            ),
        )
        context.assert_(
            "CREATE TABLE some_table "
            "(x INTEGER CONSTRAINT ck_some_table_cc1 CHECK (im a constraint))"
        )

    def test_add_check_constraint_inline_on_column_w_f(self):
        context = op_fixture(
            naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}
        )
        op.create_table(
            "some_table",
            Column(
                "x",
                Integer,
                CheckConstraint("im a constraint", name=op.f("ck_q_cc1")),
            ),
        )
        context.assert_(
            "CREATE TABLE some_table "
            "(x INTEGER CONSTRAINT ck_q_cc1 CHECK (im a constraint))"
        )

    def test_add_column_schema_type(self):
        # Boolean with create_constraint emits a named CHECK constraint
        # via the convention
        context = op_fixture(
            naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}
        )
        op.add_column(
            "t1",
            Column(
                "c1",
                Boolean(name="foo", create_constraint=True),
                nullable=False,
            ),
        )
        context.assert_(
            "ALTER TABLE t1 ADD COLUMN c1 BOOLEAN NOT NULL",
            "ALTER TABLE t1 ADD CONSTRAINT ck_t1_foo CHECK (c1 IN (0, 1))",
        )

    def test_add_column_schema_type_w_f(self):
        context = op_fixture(
            naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}
        )
        op.add_column(
            "t1",
            Column(
                "c1",
                Boolean(name=op.f("foo"), create_constraint=True),
                nullable=False,
            ),
        )
        context.assert_(
            "ALTER TABLE t1 ADD COLUMN c1 BOOLEAN NOT NULL",
            "ALTER TABLE t1 ADD CONSTRAINT foo CHECK (c1 IN (0, 1))",
        )

    def test_drop_check_constraint_plain(self):
        # drop_constraint also runs the given name through the convention
        context = op_fixture(
            naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}
        )

        op.drop_constraint("foo_bar_bat", "t1", type_="check")
        context.assert_("ALTER TABLE t1 DROP CONSTRAINT ck_t1_foo_bar_bat")

    def test_drop_check_constraint_opf(self):
        context = op_fixture(
            naming_convention={"ck": "ck_%(table_name)s_%(constraint_name)s"}
        )

        op.drop_constraint(
            op.f("some_specific_foo_bar_bat"), "t1", type_="check"
        )
        context.assert_(
            "ALTER TABLE t1 DROP CONSTRAINT some_specific_foo_bar_bat"
        )

-------------------------------------------------------------------------------- /tests/test_stubs.py: --------------------------------------------------------------------------------

import difflib
from pathlib import Path
import subprocess
import sys

import alembic
from alembic.testing import combinations
from alembic.testing import eq_
from alembic.testing import TestBase

# repository root, resolved relative to this test file
_home = Path(__file__).parent.parent


def run_command(file):
    """Run tools/write_pyi.py for the named stub target, capturing stdout.

    :param file: the ``--name`` argument understood by write_pyi.py
    :return: the completed :class:`subprocess.CompletedProcess`
    """
    res = subprocess.run(
        [
            sys.executable,
            str((_home / "tools" / "write_pyi.py").relative_to(_home)),
            "--stdout",
            "--name",
            file,
        ],
        stdout=subprocess.PIPE,
        cwd=_home,
        encoding="utf-8",
    )
    return res


class TestStubFiles(TestBase):
    """Verifies the checked-in .pyi stubs match what tools/write_pyi.py
    would regenerate."""

    __requires__ = ("stubs_test",)

    def test_op_pyi(self):
        res = run_command("op")
        generated = res.stdout
        file_path = Path(alembic.__file__).parent / "op.pyi"
        expected = file_path.read_text()
        # third arg: unified diff shown on failure
        eq_(generated, expected, compare(generated, expected))

    def test_context_pyi(self):
        res = run_command("context")
        generated = res.stdout
        file_path = Path(alembic.__file__).parent / "context.pyi"
        expected = file_path.read_text()
        eq_(generated, expected, compare(generated, expected))

    @combinations("batch_op", "op_cls")
    def test_operation_base_file(self, name):
        res = run_command(name)
        generated = res.stdout
        file_path = Path(alembic.__file__).parent / "operations/base.py"
        expected = file_path.read_text()
        eq_(generated, expected, compare(generated, expected))


def compare(actual: str, expected: str):
    """Return a unified diff of *actual* vs *expected* for failure output."""
    diff = difflib.unified_diff(
        actual.splitlines(),
        expected.splitlines(),
        fromfile="generated",
        tofile="expected",
    )
    return "\n".join(diff)

-------------------------------------------------------------------------------- /tests/test_suite.py: --------------------------------------------------------------------------------

from alembic.testing.suite import *  # noqa

-------------------------------------------------------------------------------- /tox.ini: --------------------------------------------------------------------------------

[tox]

envlist = py-sqlalchemy

SQLA_REPO = {env:SQLA_REPO:git+https://github.com/sqlalchemy/sqlalchemy.git}
BLACK_VERSION = 25.1.0

[testenv]
cov_args=--cov=alembic --cov-report term --cov-report xml

deps=pytest>4.6,<8.4
     pytest-xdist
     sqla14: {[tox]SQLA_REPO}@rel_1_4#egg=sqlalchemy
     sqla20: {[tox]SQLA_REPO}@rel_2_0#egg=sqlalchemy
     sqlamain: {[tox]SQLA_REPO}#egg=sqlalchemy
     postgresql: psycopg2>=2.7
     mysql: mysqlclient>=1.4.0
     mysql: pymysql
     oracle: cx_oracle>=7
     mssql: pyodbc
     cov: pytest-cov
     sqlalchemy: sqlalchemy>=1.4.0
     mako
     tzdata
     zimports
     black=={[tox]BLACK_VERSION}
     greenlet>=1



usedevelop=
     cov: True

# only use --dropfirst option if we're *not* using -n;
# if -n is used, we're working in brand new DBs anyway
setenv=
    BASECOMMAND=python -m pytest {tty:--color=yes} --rootdir {toxinidir}
    WORKERS={env:TOX_WORKERS:-n4}
    cov: COVERAGE={[testenv]cov_args}
    sqlite: SQLITE={env:TOX_SQLITE:--db sqlite}
    postgresql: POSTGRESQL={env:TOX_POSTGRESQL:--db postgresql}
    mysql: MYSQL={env:TOX_MYSQL:--db mysql}
    oracle: ORACLE={env:TOX_ORACLE:--db oracle} --low-connections --write-idents db_idents.txt
    mssql: MSSQL={env:TOX_MSSQL:--db mssql}
    pyoptimize: PYTHONOPTIMIZE=1
    pyoptimize: LIMITTESTS="tests/test_script_consumption.py"
    future: SQLALCHEMY_TESTING_FUTURE_ENGINE=1
    SQLALCHEMY_WARN_20=1


# tox as of 2.0 blocks all environment variables from the
# outside, unless they are here (or in TOX_TESTENV_PASSENV,
# wildcards OK). Need at least these
passenv=
    ORACLE_HOME
    NLS_LANG
    TOX_SQLITE
    TOX_POSTGRESQL
    TOX_MYSQL
    TOX_ORACLE
    TOX_MSSQL

commands=
     {env:BASECOMMAND} {env:WORKERS} {env:SQLITE:} {env:POSTGRESQL:} {env:MYSQL:} {env:ORACLE:} {env:MSSQL:} {env:BACKENDONLY:} {env:COVERAGE:} {env:LIMITTESTS:} {posargs}
     {oracle,mssql}: python reap_dbs.py db_idents.txt


[testenv:pep484]
basepython =
    python312
    python313
deps=
    mypy
    sqlalchemy>=2
    mako
    # is imported in alembic/testing and mypy complains if it's not installed.
    pytest
commands = mypy ./alembic/ --exclude alembic/templates

[testenv:mypy]
basepython = {[testenv:pep484]basepython}
deps=
    {[testenv:pep484]deps}
commands = {[testenv:pep484]commands}

[testenv:pep8]
basepython = python3
deps=
    flake8
    flake8-import-order
    flake8-import-single==0.1.5
    flake8-builtins
    flake8-docstrings
    flake8-rst-docstrings
    pydocstyle<4.0.0
    # used by flake8-rst-docstrings
    pygments
    black=={[tox]BLACK_VERSION}
commands =
    flake8 ./alembic/ ./tests/ setup.py docs/build/conf.py {posargs}
    black --check setup.py tests alembic

[testenv:write_pyi]
basepython = python3
deps=
    sqlalchemy>=2
    mako
    zimports
    black=={[tox]BLACK_VERSION}
commands = python tools/write_pyi.py
--------------------------------------------------------------------------------