├── .github └── workflows │ └── test.yml ├── .gitignore ├── .isort.cfg ├── .pylintrc ├── AUTHORS ├── Dockerfile ├── LICENSE ├── Makefile ├── README.md ├── doc ├── foodb │ ├── callbacks │ │ ├── afterAll │ │ │ └── 00_dummy_after_all.sql │ │ ├── afterEach │ │ │ └── 00_dummy_after_each.sql │ │ ├── beforeAll │ │ │ └── 00_create_database_ops.sql │ │ └── beforeEach │ │ │ └── 00_dummy_before_each.sql │ ├── grants │ │ └── foo.sql │ ├── migrations.yml │ └── migrations │ │ ├── V0001__Initial_schema_foo.sql │ │ ├── V0002__Add_baz_column_to_foo.sql │ │ └── V0003__NONTRANSACTIONAL_Add_index_on_baz_column.sql └── tutorial.md ├── features ├── baseline.feature ├── clean.feature ├── config.feature ├── conflicting_pids.feature ├── dryrun.feature ├── empty_is_uninitialized.feature ├── environment.py ├── errors_handle.feature ├── info.feature ├── migrations_from_dir.feature ├── modeline.feature ├── nontransactional_migrations.feature ├── schema.feature └── steps │ ├── callbacks.py │ ├── check_if_contains_schema_version.py │ ├── check_if_empty.py │ ├── config.py │ ├── conflicting_versions.py │ ├── database_and_connection.py │ ├── migration.py │ ├── migration_dir.py │ ├── migration_failed.py │ ├── migration_info_contains_forced_baseline.py │ ├── migration_info_contains_single_migration.py │ ├── migration_list_empty.py │ ├── migration_list_equals_single_transactional_migration.py │ ├── migration_passed.py │ ├── pgmigrate_output.py │ ├── query.py │ └── run_pgmigrate.py ├── pgmigrate.py ├── run_test.sh ├── setup.py └── tox.ini /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | push: {} 5 | pull_request: {} 6 | workflow_dispatch: {} 7 | schedule: 8 | - cron: '30 06 * * *' 9 | 10 | jobs: 11 | test: 12 | name: Run tests and linters 13 | runs-on: ubuntu-24.04 14 | env: 15 | LANG: en_US.utf8 16 | DEBIAN_FRONTEND: noninteractive 17 | PG_MAJOR: 17 18 | container: 19 | image: ubuntu:noble 20 | steps: 21 | - uses: actions/checkout@v4 22 | - name: Set locale to utf-8 23 | run: | 24 | apt-get update 25 | apt-get install -y ca-certificates locales 26 | localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8 27 | - name: Install wget 28 | run: | 29 | apt-get install -y wget 30 | rm -rf /var/lib/apt/lists/* 31 | - name: Check for spelling typos 32 | uses: crate-ci/typos@master 33 | - name: Set postgresql uid/gid 34 | run: | 35 | groupadd -r postgres --gid=999 36 | useradd -r -d /var/lib/postgresql -g postgres --uid=999 postgres 37 | - name: Install PostgreSQL ${PG_MAJOR} and Python 38 | run: | 39 | echo 'deb http://apt.postgresql.org/pub/repos/apt/ noble-pgdg main' $PG_MAJOR > /etc/apt/sources.list.d/pgdg.list 40 | echo 'deb https://ppa.launchpadcontent.net/deadsnakes/ppa/ubuntu/ noble main' > /etc/apt/sources.list.d/deadsnakes-ubuntu-ppa.list 41 | apt-get -o Acquire::AllowInsecureRepositories=true -o Acquire::AllowDowngradeToInsecureRepositories=true update 42 | apt-get -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" -o APT::Get::AllowUnauthenticated=true install -y postgresql-common sudo libpq-dev python3.11-dev python3.11-lib2to3 build-essential curl postgresql-$PG_MAJOR postgresql-contrib-$PG_MAJOR 43 | curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py 44 | python3.11 get-pip.py 45 | pip3.11 install tox 46 | - name: Start PostgreSQL 47 | run: | 48 | chown -R postgres:postgres . 
49 | mkdir -p /var/log/postgresql 50 | chown postgres:postgres /var/log/postgresql 51 | sudo -u postgres /usr/lib/postgresql/${PG_MAJOR}/bin/pg_ctl -D /etc/postgresql/${PG_MAJOR}/main -l /var/log/postgresql/postgresql-${PG_MAJOR}-main.log start 52 | - name: Run tox 53 | run: sudo -u postgres tox 54 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | bin/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | *.egg-info/ 22 | .installed.cfg 23 | *.egg 24 | 25 | # Installer logs 26 | pip-log.txt 27 | pip-delete-this-directory.txt 28 | 29 | # Unit test / coverage reports 30 | htmlcov/ 31 | .tox/ 32 | .coverage 33 | .cache 34 | nosetests.xml 35 | coverage.xml 36 | 37 | # Translations 38 | *.mo 39 | 40 | # Mr Developer 41 | .mr.developer.cfg 42 | .project 43 | .pydevproject 44 | 45 | # Rope 46 | .ropeproject 47 | 48 | # Django stuff: 49 | *.log 50 | *.pot 51 | 52 | # Sphinx documentation 53 | docs/_build/ 54 | 55 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | known_third_party=psycopg2,sqlparse,yaml 3 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MAIN] 2 | 3 | # Analyse import fallback blocks. This can be used to support both Python 2 and 4 | # 3 compatible code, which means that the block might have code that exists 5 | # only in one or another interpreter, leading to false positives when analysed. 6 | analyse-fallback-blocks=no 7 | 8 | # Clear in-memory caches upon conclusion of linting. Useful if running pylint 9 | # in a server-like mode. 10 | clear-cache-post-run=no 11 | 12 | # Load and enable all available extensions. Use --list-extensions to see a list 13 | # all available extensions. 14 | #enable-all-extensions= 15 | 16 | # In error mode, messages with a category besides ERROR or FATAL are 17 | # suppressed, and no reports are done by default. Error mode is compatible with 18 | # disabling specific errors. 19 | #errors-only= 20 | 21 | # Always return a 0 (non-error) status code, even if lint errors are found. 22 | # This is primarily useful in continuous integration scripts. 23 | #exit-zero= 24 | 25 | # A comma-separated list of package or module names from where C extensions may 26 | # be loaded. Extensions are loading into the active Python interpreter and may 27 | # run arbitrary code. 28 | extension-pkg-allow-list= 29 | 30 | # A comma-separated list of package or module names from where C extensions may 31 | # be loaded. Extensions are loading into the active Python interpreter and may 32 | # run arbitrary code. (This is an alternative name to extension-pkg-allow-list 33 | # for backward compatibility.) 34 | extension-pkg-whitelist= 35 | 36 | # Return non-zero exit code if any of these messages/categories are detected, 37 | # even if score is above --fail-under value. Syntax same as enable. Messages 38 | # specified are enabled, while categories only check already-enabled messages. 
39 | fail-on= 40 | 41 | # Specify a score threshold under which the program will exit with error. 42 | fail-under=10 43 | 44 | # Interpret the stdin as a python script, whose filename needs to be passed as 45 | # the module_or_package argument. 46 | #from-stdin= 47 | 48 | # Files or directories to be skipped. They should be base names, not paths. 49 | ignore=CVS 50 | 51 | # Add files or directories matching the regular expressions patterns to the 52 | # ignore-list. The regex matches against paths and can be in Posix or Windows 53 | # format. Because '\\' represents the directory delimiter on Windows systems, 54 | # it can't be used as an escape character. 55 | ignore-paths= 56 | 57 | # Files or directories matching the regular expression patterns are skipped. 58 | # The regex matches against base names, not paths. The default value ignores 59 | # Emacs file locks 60 | ignore-patterns=^\.# 61 | 62 | # List of module names for which member attributes should not be checked 63 | # (useful for modules/projects where namespaces are manipulated during runtime 64 | # and thus existing member attributes cannot be deduced by static analysis). It 65 | # supports qualified module names, as well as Unix pattern matching. 66 | ignored-modules= 67 | 68 | # Python code to execute, usually for sys.path manipulation such as 69 | # pygtk.require(). 70 | #init-hook= 71 | 72 | # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the 73 | # number of processors available to use, and will cap the count on Windows to 74 | # avoid hangs. 75 | jobs=1 76 | 77 | # Control the amount of potential inferred values when inferring a single 78 | # object. This can help the performance when dealing with large functions or 79 | # complex, nested conditions. 80 | limit-inference-results=100 81 | 82 | # List of plugins (as comma separated values of python module names) to load, 83 | # usually to register additional checkers. 84 | load-plugins= 85 | 86 | # Pickle collected data for later comparisons. 87 | persistent=yes 88 | 89 | # Minimum Python version to use for version dependent checks. Will default to 90 | # the version used to run pylint. 91 | py-version=3.11 92 | 93 | # Discover python modules and packages in the file system subtree. 94 | recursive=no 95 | 96 | # When enabled, pylint would attempt to guess common misconfiguration and emit 97 | # user-friendly hints instead of false-positive error messages. 98 | suggestion-mode=yes 99 | 100 | # Allow loading of arbitrary C extensions. Extensions are imported into the 101 | # active Python interpreter and may run arbitrary code. 102 | unsafe-load-any-extension=no 103 | 104 | # In verbose mode, extra non-checker-related info will be displayed. 105 | #verbose= 106 | 107 | 108 | [BASIC] 109 | 110 | # Naming style matching correct argument names. 111 | argument-naming-style=snake_case 112 | 113 | # Regular expression matching correct argument names. Overrides argument- 114 | # naming-style. If left empty, argument names will be checked with the set 115 | # naming style. 116 | #argument-rgx= 117 | 118 | # Naming style matching correct attribute names. 119 | attr-naming-style=snake_case 120 | 121 | # Regular expression matching correct attribute names. Overrides attr-naming- 122 | # style. If left empty, attribute names will be checked with the set naming 123 | # style. 124 | #attr-rgx= 125 | 126 | # Bad variable names which should always be refused, separated by a comma. 
127 | bad-names=foo, 128 | bar, 129 | baz, 130 | toto, 131 | tutu, 132 | tata 133 | 134 | # Bad variable names regexes, separated by a comma. If names match any regex, 135 | # they will always be refused 136 | bad-names-rgxs= 137 | 138 | # Naming style matching correct class attribute names. 139 | class-attribute-naming-style=any 140 | 141 | # Regular expression matching correct class attribute names. Overrides class- 142 | # attribute-naming-style. If left empty, class attribute names will be checked 143 | # with the set naming style. 144 | #class-attribute-rgx= 145 | 146 | # Naming style matching correct class constant names. 147 | class-const-naming-style=UPPER_CASE 148 | 149 | # Regular expression matching correct class constant names. Overrides class- 150 | # const-naming-style. If left empty, class constant names will be checked with 151 | # the set naming style. 152 | #class-const-rgx= 153 | 154 | # Naming style matching correct class names. 155 | class-naming-style=PascalCase 156 | 157 | # Regular expression matching correct class names. Overrides class-naming- 158 | # style. If left empty, class names will be checked with the set naming style. 159 | #class-rgx= 160 | 161 | # Naming style matching correct constant names. 162 | const-naming-style=UPPER_CASE 163 | 164 | # Regular expression matching correct constant names. Overrides const-naming- 165 | # style. If left empty, constant names will be checked with the set naming 166 | # style. 167 | #const-rgx= 168 | 169 | # Minimum line length for functions/classes that require docstrings, shorter 170 | # ones are exempt. 171 | docstring-min-length=-1 172 | 173 | # Naming style matching correct function names. 174 | function-naming-style=snake_case 175 | 176 | # Regular expression matching correct function names. Overrides function- 177 | # naming-style. If left empty, function names will be checked with the set 178 | # naming style. 179 | #function-rgx= 180 | 181 | # Good variable names which should always be accepted, separated by a comma. 182 | good-names=i, 183 | j, 184 | k, 185 | ex, 186 | Run, 187 | _ 188 | 189 | # Good variable names regexes, separated by a comma. If names match any regex, 190 | # they will always be accepted 191 | good-names-rgxs= 192 | 193 | # Include a hint for the correct naming format with invalid-name. 194 | include-naming-hint=no 195 | 196 | # Naming style matching correct inline iteration names. 197 | inlinevar-naming-style=any 198 | 199 | # Regular expression matching correct inline iteration names. Overrides 200 | # inlinevar-naming-style. If left empty, inline iteration names will be checked 201 | # with the set naming style. 202 | #inlinevar-rgx= 203 | 204 | # Naming style matching correct method names. 205 | method-naming-style=snake_case 206 | 207 | # Regular expression matching correct method names. Overrides method-naming- 208 | # style. If left empty, method names will be checked with the set naming style. 209 | #method-rgx= 210 | 211 | # Naming style matching correct module names. 212 | module-naming-style=snake_case 213 | 214 | # Regular expression matching correct module names. Overrides module-naming- 215 | # style. If left empty, module names will be checked with the set naming style. 216 | #module-rgx= 217 | 218 | # Colon-delimited sets of names that determine each other's naming style when 219 | # the name regexes allow several styles. 220 | name-group= 221 | 222 | # Regular expression which should only match function or class names that do 223 | # not require a docstring. 
224 | no-docstring-rgx=^_ 225 | 226 | # List of decorators that produce properties, such as abc.abstractproperty. Add 227 | # to this list to register other decorators that produce valid properties. 228 | # These decorators are taken in consideration only for invalid-name. 229 | property-classes=abc.abstractproperty 230 | 231 | # Regular expression matching correct type variable names. If left empty, type 232 | # variable names will be checked with the set naming style. 233 | #typevar-rgx= 234 | 235 | # Naming style matching correct variable names. 236 | variable-naming-style=snake_case 237 | 238 | # Regular expression matching correct variable names. Overrides variable- 239 | # naming-style. If left empty, variable names will be checked with the set 240 | # naming style. 241 | #variable-rgx= 242 | 243 | 244 | [CLASSES] 245 | 246 | # Warn about protected attribute access inside special methods 247 | check-protected-access-in-special-methods=no 248 | 249 | # List of method names used to declare (i.e. assign) instance attributes. 250 | defining-attr-methods=__init__, 251 | __new__, 252 | setUp, 253 | __post_init__ 254 | 255 | # List of member names, which should be excluded from the protected access 256 | # warning. 257 | exclude-protected=_asdict, 258 | _fields, 259 | _replace, 260 | _source, 261 | _make 262 | 263 | # List of valid names for the first argument in a class method. 264 | valid-classmethod-first-arg=cls 265 | 266 | # List of valid names for the first argument in a metaclass class method. 267 | valid-metaclass-classmethod-first-arg=mcs 268 | 269 | 270 | [DESIGN] 271 | 272 | # List of regular expressions of class ancestor names to ignore when counting 273 | # public methods (see R0903) 274 | exclude-too-few-public-methods= 275 | 276 | # List of qualified class names to ignore when counting class parents (see 277 | # R0901) 278 | ignored-parents= 279 | 280 | # Maximum number of arguments for function / method. 281 | max-args=5 282 | 283 | # Maximum number of attributes for a class (see R0902). 284 | max-attributes=7 285 | 286 | # Maximum number of boolean expressions in an if statement (see R0916). 287 | max-bool-expr=5 288 | 289 | # Maximum number of branch for function / method body. 290 | max-branches=15 291 | 292 | # Maximum number of locals for function / method body. 293 | max-locals=15 294 | 295 | # Maximum number of parents for a class (see R0901). 296 | max-parents=7 297 | 298 | # Maximum number of public methods for a class (see R0904). 299 | max-public-methods=20 300 | 301 | # Maximum number of return / yield for function / method body. 302 | max-returns=6 303 | 304 | # Maximum number of statements in function / method body. 305 | max-statements=50 306 | 307 | # Minimum number of public methods for a class (see R0903). 308 | min-public-methods=2 309 | 310 | 311 | [EXCEPTIONS] 312 | 313 | # Exceptions that will emit a warning when caught. 314 | overgeneral-exceptions=builtins.BaseException,builtins.Exception 315 | 316 | 317 | [FORMAT] 318 | 319 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 320 | expected-line-ending-format= 321 | 322 | # Regexp for a line that is allowed to be longer than the limit. 323 | ignore-long-lines=^\s*(# )??$ 324 | 325 | # Number of spaces of indent required inside a hanging or continued line. 326 | indent-after-paren=4 327 | 328 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 329 | # tab). 330 | indent-string=' ' 331 | 332 | # Maximum number of characters on a single line. 
333 | max-line-length=100 334 | 335 | # Maximum number of lines in a module. 336 | max-module-lines=1000 337 | 338 | # Allow the body of a class to be on the same line as the declaration if body 339 | # contains single statement. 340 | single-line-class-stmt=no 341 | 342 | # Allow the body of an if to be on the same line as the test if there is no 343 | # else. 344 | single-line-if-stmt=no 345 | 346 | 347 | [IMPORTS] 348 | 349 | # List of modules that can be imported at any level, not just the top level 350 | # one. 351 | allow-any-import-level= 352 | 353 | # Allow explicit reexports by alias from a package __init__. 354 | allow-reexport-from-package=no 355 | 356 | # Allow wildcard imports from modules that define __all__. 357 | allow-wildcard-with-all=no 358 | 359 | # Deprecated modules which should not be used, separated by a comma. 360 | deprecated-modules= 361 | 362 | # Output a graph (.gv or any supported image format) of external dependencies 363 | # to the given file (report RP0402 must not be disabled). 364 | ext-import-graph= 365 | 366 | # Output a graph (.gv or any supported image format) of all (i.e. internal and 367 | # external) dependencies to the given file (report RP0402 must not be 368 | # disabled). 369 | import-graph= 370 | 371 | # Output a graph (.gv or any supported image format) of internal dependencies 372 | # to the given file (report RP0402 must not be disabled). 373 | int-import-graph= 374 | 375 | # Force import order to recognize a module as part of the standard 376 | # compatibility libraries. 377 | known-standard-library= 378 | 379 | # Force import order to recognize a module as part of a third party library. 380 | known-third-party=enchant 381 | 382 | # Couples of modules and preferred modules, separated by a comma. 383 | preferred-modules= 384 | 385 | 386 | [LOGGING] 387 | 388 | # The type of string formatting that logging methods do. `old` means using % 389 | # formatting, `new` is for `{}` formatting. 390 | logging-format-style=old 391 | 392 | # Logging modules to check that the string format arguments are in logging 393 | # function parameter format. 394 | logging-modules=logging 395 | 396 | 397 | [MESSAGES CONTROL] 398 | 399 | # Only show warnings with the listed confidence levels. Leave empty to show 400 | # all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, 401 | # UNDEFINED. 402 | confidence=HIGH, 403 | CONTROL_FLOW, 404 | INFERENCE, 405 | INFERENCE_FAILURE, 406 | UNDEFINED 407 | 408 | # Disable the message, report, category or checker with the given id(s). You 409 | # can either give multiple identifiers separated by comma (,) or put this 410 | # option multiple times (only on the command line, not in the configuration 411 | # file where it should appear only once). You can also use "--disable=all" to 412 | # disable everything first and then re-enable specific checks. For example, if 413 | # you want to run only the similarities checker, you can use "--disable=all 414 | # --enable=similarities". If you want to run only the classes checker, but have 415 | # no Warning level messages displayed, use "--disable=all --enable=classes 416 | # --disable=W". 
417 | disable=raw-checker-failed, 418 | bad-inline-option, 419 | locally-disabled, 420 | file-ignored, 421 | suppressed-message, 422 | useless-suppression, 423 | deprecated-pragma, 424 | use-symbolic-message-instead, 425 | consider-using-f-string, 426 | consider-using-dict-items, 427 | redundant-u-string-prefix, 428 | raise-missing-from, 429 | too-many-arguments, 430 | too-many-positional-arguments 431 | 432 | # Enable the message, report, category or checker with the given id(s). You can 433 | # either give multiple identifier separated by comma (,) or put this option 434 | # multiple time (only on the command line, not in the configuration file where 435 | # it should appear only once). See also the "--disable" option for examples. 436 | enable=c-extension-no-member 437 | 438 | 439 | [METHOD_ARGS] 440 | 441 | # List of qualified names (i.e., library.method) which require a timeout 442 | # parameter e.g. 'requests.api.get,requests.api.post' 443 | timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request 444 | 445 | 446 | [MISCELLANEOUS] 447 | 448 | # List of note tags to take in consideration, separated by a comma. 449 | notes=FIXME, 450 | XXX, 451 | TODO 452 | 453 | # Regular expression of note tags to take in consideration. 454 | notes-rgx= 455 | 456 | 457 | [REFACTORING] 458 | 459 | # Maximum number of nested blocks for function / method body 460 | max-nested-blocks=5 461 | 462 | # Complete name of functions that never returns. When checking for 463 | # inconsistent-return-statements if a never returning function is called then 464 | # it will be considered as an explicit return statement and no message will be 465 | # printed. 466 | never-returning-functions=sys.exit,argparse.parse_error 467 | 468 | 469 | [REPORTS] 470 | 471 | # Python expression which should return a score less than or equal to 10. You 472 | # have access to the variables 'fatal', 'error', 'warning', 'refactor', 473 | # 'convention', and 'info' which contain the number of messages in each 474 | # category, as well as 'statement' which is the total number of statements 475 | # analyzed. This score is used by the global evaluation report (RP0004). 476 | evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) 477 | 478 | # Template used to display messages. This is a python new-style format string 479 | # used to format the message information. See doc for all details. 480 | msg-template= 481 | 482 | # Set the output format. Available formats are text, parseable, colorized, json 483 | # and msvs (visual studio). You can also give a reporter class, e.g. 484 | # mypackage.mymodule.MyReporterClass. 485 | #output-format= 486 | 487 | # Tells whether to display a full report or only the messages. 488 | reports=no 489 | 490 | # Activate the evaluation score. 491 | score=yes 492 | 493 | 494 | [SIMILARITIES] 495 | 496 | # Comments are removed from the similarity computation 497 | ignore-comments=yes 498 | 499 | # Docstrings are removed from the similarity computation 500 | ignore-docstrings=yes 501 | 502 | # Imports are removed from the similarity computation 503 | ignore-imports=yes 504 | 505 | # Signatures are removed from the similarity computation 506 | ignore-signatures=yes 507 | 508 | # Minimum lines number of a similarity. 509 | min-similarity-lines=4 510 | 511 | 512 | [SPELLING] 513 | 514 | # Limits count of emitted suggestions for spelling mistakes. 
515 | max-spelling-suggestions=4 516 | 517 | # Spelling dictionary name. Available dictionaries: none. To make it work, 518 | # install the 'python-enchant' package. 519 | spelling-dict= 520 | 521 | # List of comma separated words that should be considered directives if they 522 | # appear at the beginning of a comment and should not be checked. 523 | spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: 524 | 525 | # List of comma separated words that should not be checked. 526 | spelling-ignore-words= 527 | 528 | # A path to a file that contains the private dictionary; one word per line. 529 | spelling-private-dict-file= 530 | 531 | # Tells whether to store unknown words to the private dictionary (see the 532 | # --spelling-private-dict-file option) instead of raising a message. 533 | spelling-store-unknown-words=no 534 | 535 | 536 | [STRING] 537 | 538 | # This flag controls whether inconsistent-quotes generates a warning when the 539 | # character used as a quote delimiter is used inconsistently within a module. 540 | check-quote-consistency=no 541 | 542 | # This flag controls whether the implicit-str-concat should generate a warning 543 | # on implicit string concatenation in sequences defined over several lines. 544 | check-str-concat-over-line-jumps=no 545 | 546 | 547 | [TYPECHECK] 548 | 549 | # List of decorators that produce context managers, such as 550 | # contextlib.contextmanager. Add to this list to register other decorators that 551 | # produce valid context managers. 552 | contextmanager-decorators=contextlib.contextmanager 553 | 554 | # List of members which are set dynamically and missed by pylint inference 555 | # system, and so shouldn't trigger E1101 when accessed. Python regular 556 | # expressions are accepted. 557 | generated-members= 558 | 559 | # Tells whether to warn about missing members when the owner of the attribute 560 | # is inferred to be None. 561 | ignore-none=yes 562 | 563 | # This flag controls whether pylint should warn about no-member and similar 564 | # checks whenever an opaque object is returned when inferring. The inference 565 | # can return multiple potential results while evaluating a Python object, but 566 | # some branches might not be evaluated, which results in partial inference. In 567 | # that case, it might be useful to still emit no-member and other checks for 568 | # the rest of the inferred objects. 569 | ignore-on-opaque-inference=yes 570 | 571 | # List of symbolic message names to ignore for Mixin members. 572 | ignored-checks-for-mixins=no-member, 573 | not-async-context-manager, 574 | not-context-manager, 575 | attribute-defined-outside-init 576 | 577 | # List of class names for which member attributes should not be checked (useful 578 | # for classes with dynamically set attributes). This supports the use of 579 | # qualified names. 580 | ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace 581 | 582 | # Show a hint with possible names when a member name was not found. The aspect 583 | # of finding the hint is based on edit distance. 584 | missing-member-hint=yes 585 | 586 | # The minimum edit distance a name should have in order to be considered a 587 | # similar match for a missing member name. 588 | missing-member-hint-distance=1 589 | 590 | # The total number of similar names that should be taken in consideration when 591 | # showing a hint for a missing member. 
592 | missing-member-max-choices=1 593 | 594 | # Regex pattern to define which classes are considered mixins. 595 | mixin-class-rgx=.*[Mm]ixin 596 | 597 | # List of decorators that change the signature of a decorated function. 598 | signature-mutators= 599 | 600 | 601 | [VARIABLES] 602 | 603 | # List of additional names supposed to be defined in builtins. Remember that 604 | # you should avoid defining new builtins when possible. 605 | additional-builtins= 606 | 607 | # Tells whether unused global variables should be treated as a violation. 608 | allow-global-unused-variables=yes 609 | 610 | # List of names allowed to shadow builtins 611 | allowed-redefined-builtins= 612 | 613 | # List of strings which can identify a callback function by name. A callback 614 | # name must start or end with one of those strings. 615 | callbacks=cb_, 616 | _cb 617 | 618 | # A regular expression matching the name of dummy variables (i.e. expected to 619 | # not be used). 620 | dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ 621 | 622 | # Argument names that match this expression will be ignored. 623 | ignored-argument-names=_.*|^ignored_|^unused_ 624 | 625 | # Tells whether we should check for unused import in __init__ files. 626 | init-import=no 627 | 628 | # List of qualified module names which can have objects that can redefine 629 | # builtins. 630 | redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io 631 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | (C) YANDEX LLC, 2016-2025 2 | 3 | People that contributed to it: 4 | 5 | Alexander Artemenko 6 | Alexander Klyuev 7 | Ernst Haagsman 8 | Evgeny Dyukov 9 | Tejas Mandre 10 | Vadim Bahmatovich 11 | Vladimir Antipin 12 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # vim:set ft=dockerfile: 2 | FROM ubuntu:jammy 3 | 4 | # explicitly set user/group IDs 5 | RUN groupadd -r postgres --gid=999 && useradd -r -d /var/lib/postgresql -g postgres --uid=999 postgres 6 | 7 | # make the "en_US.UTF-8" locale so postgres will be utf-8 enabled by default 8 | RUN apt-get update && apt-get install -y ca-certificates locales && \ 9 | rm -rf /var/lib/apt/lists/* && \ 10 | localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8 11 | ENV LANG en_US.utf8 12 | ENV DEBIAN_FRONTEND noninteractive 13 | 14 | ENV PG_MAJOR 17 15 | 16 | RUN echo 'deb http://apt.postgresql.org/pub/repos/apt/ jammy-pgdg main' $PG_MAJOR > /etc/apt/sources.list.d/pgdg.list 17 | RUN echo 'deb https://ppa.launchpadcontent.net/deadsnakes/ppa/ubuntu/ jammy main' > /etc/apt/sources.list.d/deadsnakes-ubuntu-ppa.list 18 | 19 | RUN apt-get -o Acquire::AllowInsecureRepositories=true \ 20 | -o Acquire::AllowDowngradeToInsecureRepositories=true update \ 21 | && apt-get \ 22 | -o Dpkg::Options::="--force-confdef" \ 23 | -o Dpkg::Options::="--force-confold" \ 24 | -o APT::Get::AllowUnauthenticated=true \ 25 | install -y postgresql-common \ 26 | sudo \ 27 | libpq-dev \ 28 | python2.7-dev \ 29 | python3.11-dev \ 30 | python3.11-lib2to3 \ 31 | build-essential \ 32 | curl \ 33 | postgresql-$PG_MAJOR \ 34 | postgresql-contrib-$PG_MAJOR \ 35 | && curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py \ 36 | && python3.11 get-pip.py \ 37 | && pip3.11 install tox 38 | 39 | COPY ./ /dist 40 | 
41 | CMD ["/dist/run_test.sh"] 42 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016, YANDEX LLC 2 | 3 | Permission to use, copy, modify, and distribute this software and its 4 | documentation for any purpose, without fee, and without a written 5 | agreement is hereby granted, provided that the above copyright notice 6 | and this paragraph and the following two paragraphs appear in all copies. 7 | 8 | IN NO EVENT SHALL YANDEX LLC BE LIABLE TO ANY PARTY FOR DIRECT, 9 | INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST 10 | PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, 11 | EVEN IF YANDEX LLC HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 12 | 13 | YANDEX LLC SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT 14 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A 15 | PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS" 16 | BASIS, AND YANDEX LLC HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE, 17 | SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 18 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: test 2 | 3 | test: 4 | docker build -t pgmigrate . 5 | docker run -t pgmigrate 6 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![PyPI version](https://badge.fury.io/py/yandex-pgmigrate.svg)](https://badge.fury.io/py/yandex-pgmigrate) 2 | ![Build Status](https://github.com/yandex/pgmigrate/workflows/Test/badge.svg) 3 | 4 | # PGmigrate 5 | 6 | PostgreSQL migrations made easy 7 | 8 | ## Overview 9 | 10 | PGmigrate is a database migration tool developed by Yandex. 11 | 12 | PGmigrate has the following key features: 13 | 14 | * **Transactional and nontransactional migrations:** you can enjoy the whole power 15 | of PostgreSQL DDL 16 | * **Callbacks:** you can run some DDL at specific steps of the migration process 17 | (e.g. drop some code before executing migrations, and recreate it after the 18 | migrations have been applied) 19 | * **Online migrations:** you can execute a series of transactional migrations 20 | and callbacks in a single transaction (so, if something goes wrong, a simple 21 | `ROLLBACK` will bring you back to a consistent state) 22 | 23 | ## Install 24 | 25 | ``` 26 | pip install yandex-pgmigrate 27 | ``` 28 | 29 | ## Running tests 30 | 31 | Tests require a running PostgreSQL instance with a superuser (to create/drop databases). 32 | You can set this up yourself and use [tox](https://pypi.python.org/pypi/tox) 33 | to run the tests: 34 | ``` 35 | tox 36 | ``` 37 | The second option is to use [docker](https://www.docker.com) and make: 38 | ``` 39 | make test 40 | ``` 41 | 42 | ## How to use 43 | 44 | The complete manual is [here](doc/tutorial.md).
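As a quick start, a minimal project could look like the sketch below. This is an illustrative example rather than part of the manual: the `mydb` database, directory and table names are made up, while the `migrations/` directory, a `migrations.yml` with a `conn` string, the `V<version>__<description>.sql` naming, and the `info`/`migrate` commands with `-t latest` are the pieces shown in the tutorial.

```
# hypothetical quick start; assumes database "mydb" already exists and is reachable
mkdir -p mydb/migrations
cat > mydb/migrations.yml <<'EOF'
conn: dbname=mydb
EOF
cat > mydb/migrations/V0001__Initial_schema.sql <<'EOF'
CREATE TABLE example (id BIGINT PRIMARY KEY, payload TEXT);
EOF
cd mydb
pgmigrate -t latest info     # preview migrations that would be applied
pgmigrate -t latest migrate  # apply them
```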
45 | 46 | ## Release history 47 | 48 | * 1.0.9 (2024-07-06) 49 | * Add an option to show only unapplied migrations in info 50 | * 1.0.8 (2024-03-08) 51 | * Allow reordering setting schema version and afterEach callback 52 | * 1.0.7 (2022-02-02) 53 | * Skip unnecessary schema creation on init 54 | * Add file path to statement apply error log 55 | * Add version gaps check 56 | * 1.0.6 (2020-10-29) 57 | * Make dsn manipulations more robust 58 | * Fix empty values-related bugs in config and args parsing 59 | * 1.0.5 (2020-02-29) 60 | * Use application_name instead of backend pid for conflict termination 61 | * 1.0.4 (2019-04-14) 62 | * Allow using subdirs in migrations folder 63 | * 1.0.3 (2017-12-28) 64 | * Fix migration error with comment at the end of file 65 | * Add blocking pids termination 66 | * Some minor fixes and improvements 67 | * 1.0.2 (2017-04-05) 68 | * Speed up get_info function a bit 69 | * Fix callbacks in transactional/nontransactional migrations mix on db init 70 | * 1.0.1 (2017-04-01) 71 | * Fix bug with python format patterns in migration text 72 | * Sort info command output by version 73 | * Support 'latest' target version 74 | * Add option to override user in migration meta 75 | * Fix info command fail without target on initialized database 76 | * Add session setup option 77 | * 1.0.0 (2016-10-03) 78 | * First opensource version 79 | 80 | ## License 81 | 82 | Distributed under the PostgreSQL license. See [LICENSE](LICENSE) for more 83 | information. 84 | -------------------------------------------------------------------------------- /doc/foodb/callbacks/afterAll/00_dummy_after_all.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO ops (op) VALUES ('afterAll 00_dummy_after_all.sql'); 2 | -------------------------------------------------------------------------------- /doc/foodb/callbacks/afterEach/00_dummy_after_each.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO ops (op) VALUES ('afterEach 00_dummy_after_each.sql'); 2 | -------------------------------------------------------------------------------- /doc/foodb/callbacks/beforeAll/00_create_database_ops.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE IF NOT EXISTS ops ( 2 | seq SERIAL PRIMARY KEY, 3 | op TEXT NOT NULL 4 | ); 5 | 6 | INSERT INTO ops (op) VALUES ('beforeAll 00_create_database_ops.sql'); 7 | -------------------------------------------------------------------------------- /doc/foodb/callbacks/beforeEach/00_dummy_before_each.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO ops (op) VALUES ('beforeEach 00_dummy_before_each.sql'); 2 | -------------------------------------------------------------------------------- /doc/foodb/grants/foo.sql: -------------------------------------------------------------------------------- 1 | GRANT USAGE ON SCHEMA foo TO foo; 2 | GRANT ALL ON ALL TABLES IN SCHEMA foo TO foo; 3 | 4 | INSERT INTO ops (op) VALUES ('grants foo'); 5 | -------------------------------------------------------------------------------- /doc/foodb/migrations.yml: -------------------------------------------------------------------------------- 1 | callbacks: 2 | beforeAll: 3 | - callbacks/beforeAll 4 | beforeEach: 5 | - callbacks/beforeEach 6 | afterEach: 7 | - callbacks/afterEach 8 | afterAll: 9 | - callbacks/afterAll 10 | - grants 11 | conn: dbname=foodb 12 | 
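For illustration, roughly the same setup as this `migrations.yml` could be passed on the command line instead, using the `-c` and `-a` flags shown in the tutorial below. Treating the `-a` list as repeatable per callback type (two `afterAll` entries here) is an assumption; the configuration file above is what the tutorial actually uses for this layout.

```
# sketch of a command-line equivalent of doc/foodb/migrations.yml (assumed -a semantics)
pgmigrate -c 'dbname=foodb' \
    -a 'beforeAll:callbacks/beforeAll,beforeEach:callbacks/beforeEach,afterEach:callbacks/afterEach,afterAll:callbacks/afterAll,afterAll:grants' \
    -t latest migrate
```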
-------------------------------------------------------------------------------- /doc/foodb/migrations/V0001__Initial_schema_foo.sql: -------------------------------------------------------------------------------- 1 | CREATE SCHEMA foo; 2 | 3 | CREATE TABLE foo.foo ( 4 | id BIGINT PRIMARY KEY, 5 | bar TEXT NOT NULL 6 | ); 7 | 8 | INSERT INTO ops (op) VALUES ('migration V0001__Initial_schema_foo.sql'); 9 | -------------------------------------------------------------------------------- /doc/foodb/migrations/V0002__Add_baz_column_to_foo.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE foo.foo ADD COLUMN baz BIGINT NOT NULL DEFAULT 0; 2 | 3 | INSERT INTO ops (op) VALUES ('migration V0002__Add_baz_column_to_foo.sql'); 4 | -------------------------------------------------------------------------------- /doc/foodb/migrations/V0003__NONTRANSACTIONAL_Add_index_on_baz_column.sql: -------------------------------------------------------------------------------- 1 | CREATE INDEX CONCURRENTLY i_foo_baz ON foo.foo (baz); 2 | 3 | INSERT INTO ops (op) VALUES ('migration V0003__NONTRANSACTIONAL_Add_index_on_baz_column.sql'); 4 | -------------------------------------------------------------------------------- /doc/tutorial.md: -------------------------------------------------------------------------------- 1 | # PGmigrate tutorial 2 | 3 | We'll play around with example database `foodb`. 4 | 5 | ## Base directory structure of our example 6 | 7 | Our [Example db](foodb) migrations dir structure looks like this: 8 | ``` 9 | foodb 10 | ├── callbacks # directory with sql callbacks 11 | │ ├── afterAll # will be executed before commit and after last migration 12 | │ ├── afterEach # will be executed after each migration 13 | │ ├── beforeAll # will be executed after begin and before first migration 14 | │ └── beforeEach # will be executed before each migration 15 | ├── grants # use this dir to set special callbacks for grants 16 | ├── migrations # migrations dir 17 | ├── migrations.yml # pgmigrate configuration 18 | ``` 19 | Every sql file has special operation on table `ops`. 20 | This will help in understanding what is going on in each pgmigrate run. 21 | 22 | ## Configuration 23 | 24 | Let's start with [Example configuration](foodb/migrations.yml). 25 | 26 | ### Callbacks 27 | Callbacks could be configured via command-line arguments like this: 28 | ``` 29 | admin@localhost foodb $ pgmigrate -a beforeAll:callbacks/beforeAll,afterAll:callbacks/afterAll ... 30 | ``` 31 | But if we have a lot of callbacks we could use configuration file for them: 32 | ``` 33 | callbacks: 34 | beforeAll: 35 | - callbacks/beforeAll 36 | beforeEach: 37 | - callbacks/beforeEach 38 | afterEach: 39 | - callbacks/afterEach 40 | afterAll: 41 | - callbacks/afterAll 42 | - grants 43 | ``` 44 | 45 | ### Connection 46 | We could use command-line arguments for connection configuration: 47 | ``` 48 | admin@localhost foodb $ pgmigrate -c 'dbname=foodb user=foo ...' ... 49 | ``` 50 | Or configuration file: 51 | ``` 52 | conn: dbname=foodb 53 | ``` 54 | Third option for setting connection params is using [environment variables](https://www.postgresql.org/docs/current/libpq-envars.html): 55 | ``` 56 | admin@localhost foodb $ PGDATABASE=foodb pgmigrate -c '' ... 57 | ``` 58 | Note: we need to explicitly set connstring to empty value via command-line 59 | argument or configuration file to force psycopg2 to pick fields from 60 | environment. 
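For example, a fully environment-driven invocation might look like this (the host, port, user and password values here are made up for illustration; the variables themselves are standard libpq environment variables):

```
admin@localhost foodb $ PGHOST=localhost PGPORT=5432 PGUSER=admin PGPASSWORD=secret PGDATABASE=foodb \
    pgmigrate -c '' -t 1 info
```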
61 | 62 | ## Migration file name pattern 63 | 64 | All migration files should have versions and 65 | names in the following format 66 | ``` 67 | V__.sql 68 | ``` 69 | Note: files not matching this pattern will be skipped. 70 | 71 | ## Creating `foo` user and `foodb` 72 | 73 | We'll need dummy user and database for our experiments. 74 | ``` 75 | postgres=# CREATE ROLE foo WITH LOGIN PASSWORD 'foo'; 76 | CREATE ROLE 77 | postgres=# CREATE DATABASE foodb; 78 | CREATE DATABASE 79 | ``` 80 | 81 | ## Getting migrations info before first migration 82 | 83 | ``` 84 | admin@localhost foodb $ pgmigrate -t 1 info 85 | { 86 | "1": { 87 | "description": "Initial schema foo", 88 | "transactional": true, 89 | "version": 1, 90 | "installed_by": null, 91 | "type": "auto", 92 | "installed_on": null 93 | } 94 | } 95 | ``` 96 | Here we see json description of migrations that will be applied if 97 | we want to get to version 1. 98 | 99 | Let's try to check steps to apply up to version 3 but ignoring version 1: 100 | ``` 101 | admin@localhost foodb $ pgmigrate -b 1 -t 3 info 102 | { 103 | "2": { 104 | "description": "Add baz column to foo", 105 | "transactional": true, 106 | "version": 2, 107 | "installed_by": null, 108 | "type": "auto", 109 | "installed_on": null 110 | }, 111 | "3": { 112 | "description": "NONTRANSACTIONAL Add index on baz column", 113 | "transactional": false, 114 | "version": 3, 115 | "installed_by": null, 116 | "type": "auto", 117 | "installed_on": null 118 | } 119 | } 120 | ``` 121 | 122 | ## Migrating to first version 123 | 124 | ``` 125 | admin@localhost foodb $ pgmigrate -t 1 migrate 126 | admin@localhost foodb $ echo $? 127 | 0 128 | ``` 129 | Ok. Migration applied. Let's see what is in our db now. 130 | 131 | ``` 132 | admin@localhost foodb $ psql foodb 133 | psql (9.5.4) 134 | Type "help" for help. 135 | 136 | foodb=# SELECT * FROM ops; 137 | seq | op 138 | -----+----------------------------------------- 139 | 1 | beforeAll 00_create_database_ops.sql 140 | 2 | beforeEach 00_dummy_before_each.sql 141 | 3 | migration V0001__Initial_schema_foo.sql 142 | 4 | afterEach 00_dummy_after_each.sql 143 | 5 | afterAll 00_dummy_after_all.sql 144 | 6 | grants foo 145 | (6 rows) 146 | 147 | foodb=# \dt foo.foo 148 | List of relations 149 | Schema | Name | Type | Owner 150 | --------+------+-------+------- 151 | foo | foo | table | admin 152 | (1 row) 153 | 154 | foodb=# \dS+ foo.foo 155 | Table "foo.foo" 156 | Column | Type | Modifiers | Storage | Stats target | Description 157 | --------+--------+-----------+----------+--------------+------------- 158 | id | bigint | not null | plain | | 159 | bar | text | not null | extended | | 160 | Indexes: 161 | "foo_pkey" PRIMARY KEY, btree (id) 162 | ``` 163 | 164 | Let's check if `foo` user can really do something with our new table. 165 | ``` 166 | psql "dbname=foodb user=foo password=foo host=localhost" 167 | psql (9.5.4) 168 | Type "help" for help. 169 | 170 | foodb=> SELECT * FROM foo.foo; 171 | id | bar 172 | ----+----- 173 | (0 rows) 174 | ``` 175 | 176 | ## Mixing transactional and nontransactional migrations 177 | Let's try to go to version 3. 
178 | ``` 179 | admin@localhost foodb $ pgmigrate -t 3 migrate 180 | 2016-09-29 00:14:35,402 ERROR : Unable to mix transactional and nontransactional migrations 181 | Traceback (most recent call last): 182 | File "/usr/local/bin/pgmigrate", line 9, in 183 | load_entry_point('yandex-pgmigrate==1.0.0', 'console_scripts', 'pgmigrate')() 184 | File "/usr/local/lib/python2.7/dist-packages/pgmigrate.py", line 663, in _main 185 | COMMANDS[args.cmd](config) 186 | File "/usr/local/lib/python2.7/dist-packages/pgmigrate.py", line 549, in migrate 187 | raise MigrateError('Unable to mix transactional and ' 188 | pgmigrate.MigrateError: Unable to mix transactional and nontransactional migrations 189 | ``` 190 | Oops! It complained. But why? The main reason for this is quite simple: 191 | Your production databases are likely larger than test ones. 192 | And migration to version 3 could take a lot of time. 193 | You definitely should stop on version 2, check that everything is working fine, 194 | and then move to version 3. 195 | 196 | ## Migrating to second version 197 | Ok. Now let's try version 2. 198 | ``` 199 | admin@localhost foodb $ pgmigrate -t 2 migrate 200 | admin@localhost foodb $ echo $? 201 | 0 202 | ``` 203 | Looks good. But what is in db? 204 | ``` 205 | admin@localhost foodb $ psql foodb 206 | psql (9.5.4) 207 | Type "help" for help. 208 | 209 | foodb=# SELECT * FROM ops; 210 | seq | op 211 | -----+-------------------------------------------- 212 | 1 | beforeAll 00_create_database_ops.sql 213 | 2 | beforeEach 00_dummy_before_each.sql 214 | 3 | migration V0001__Initial_schema_foo.sql 215 | 4 | afterEach 00_dummy_after_each.sql 216 | 5 | afterAll 00_dummy_after_all.sql 217 | 6 | grants foo 218 | 7 | beforeAll 00_create_database_ops.sql 219 | 8 | beforeEach 00_dummy_before_each.sql 220 | 9 | migration V0002__Add_baz_column_to_foo.sql 221 | 10 | afterEach 00_dummy_after_each.sql 222 | 11 | afterAll 00_dummy_after_all.sql 223 | 12 | grants foo 224 | (12 rows) 225 | 226 | foodb=# \dS+ foo.foo 227 | Table "foo.foo" 228 | Column | Type | Modifiers | Storage | Stats target | Description 229 | --------+--------+--------------------+----------+--------------+------------- 230 | id | bigint | not null | plain | | 231 | bar | text | not null | extended | | 232 | baz | bigint | not null default 0 | plain | | 233 | Indexes: 234 | "foo_pkey" PRIMARY KEY, btree (id) 235 | ``` 236 | As we can see migration steps are almost the same as in version 1. 237 | 238 | ## Migrating to version 3 with nontransactional migration 239 | ``` 240 | admin@localhost foodb $ pgmigrate -t 3 migrate 241 | admin@localhost foodb $ echo $? 242 | 0 243 | ``` 244 | 245 | In database: 246 | ``` 247 | admin@localhost foodb $ psql foodb 248 | psql (9.5.4) 249 | Type "help" for help. 
250 | 251 | foodb=# SELECT * FROM ops; 252 | seq | op 253 | -----+--------------------------------------------------------------- 254 | 1 | beforeAll 00_create_database_ops.sql 255 | 2 | beforeEach 00_dummy_before_each.sql 256 | 3 | migration V0001__Initial_schema_foo.sql 257 | 4 | afterEach 00_dummy_after_each.sql 258 | 5 | afterAll 00_dummy_after_all.sql 259 | 6 | grants foo 260 | 7 | beforeAll 00_create_database_ops.sql 261 | 8 | beforeEach 00_dummy_before_each.sql 262 | 9 | migration V0002__Add_baz_column_to_foo.sql 263 | 10 | afterEach 00_dummy_after_each.sql 264 | 11 | afterAll 00_dummy_after_all.sql 265 | 12 | grants foo 266 | 13 | migration V0003__NONTRANSACTIONAL_Add_index_on_baz_column.sql 267 | (13 rows) 268 | 269 | foodb=# \dS+ foo.foo 270 | Table "foo.foo" 271 | Column | Type | Modifiers | Storage | Stats target | Description 272 | --------+--------+--------------------+----------+--------------+------------- 273 | id | bigint | not null | plain | | 274 | bar | text | not null | extended | | 275 | baz | bigint | not null default 0 | plain | | 276 | Indexes: 277 | "foo_pkey" PRIMARY KEY, btree (id) 278 | "i_foo_baz" btree (baz) 279 | ``` 280 | No callbacks were applied this time (we are trying to run the absolute 281 | minimum of operations outside of transactions). 282 | 283 | ## Baseline 284 | 285 | Let's suppose that you already have a database with schema on version 3. 286 | But you have already reached this state without using pgmigrate. 287 | How should you migrate to version 4 and so on with it? 288 | 289 | Let's remove schema_version info from our database 290 | ``` 291 | admin@localhost foodb $ pgmigrate clean 292 | ``` 293 | 294 | Now let's check how pgmigrate will bring us to version 3: 295 | ``` 296 | admin@localhost foodb $ pgmigrate -t 3 info 297 | { 298 | "1": { 299 | "description": "Initial schema foo", 300 | "transactional": true, 301 | "version": 1, 302 | "installed_by": null, 303 | "type": "auto", 304 | "installed_on": null 305 | }, 306 | "2": { 307 | "description": "Add baz column to foo", 308 | "transactional": true, 309 | "version": 2, 310 | "installed_by": null, 311 | "type": "auto", 312 | "installed_on": null 313 | }, 314 | "3": { 315 | "description": "NONTRANSACTIONAL Add index on baz column", 316 | "transactional": false, 317 | "version": 3, 318 | "installed_by": null, 319 | "type": "auto", 320 | "installed_on": null 321 | } 322 | } 323 | ``` 324 | This looks really bad. Our migration v1 will definitely fail 325 | (because schema `foo` already exists). 326 | Let's tell pgmigrate that our database is already on version 3. 327 | ``` 328 | admin@localhost foodb $ pgmigrate -b 3 baseline 329 | admin@localhost foodb $ pgmigrate -t 3 info 330 | { 331 | "3": { 332 | "description": "Forced baseline", 333 | "transactional": true, 334 | "version": 3, 335 | "installed_on": "2016-09-29 00:37:27", 336 | "type": "manual", 337 | "installed_by": "admin" 338 | } 339 | } 340 | ``` 341 | 342 | ## Migrations on empty database 343 | 344 | When you have hundreds of migrations with some nontransactional ones 345 | you really don't want to stop on each of them to get your empty database 346 | to specific version (consider creating new database for some experiments). 347 | 348 | PGmigrate is able to run such kind of migration in single command run 349 | (but you should definitely know what are you doing). 350 | 351 | Let's try it. 
352 | Drop and create an empty `foodb` 353 | ``` 354 | postgres=# DROP DATABASE foodb; 355 | DROP DATABASE 356 | postgres=# CREATE DATABASE foodb; 357 | CREATE DATABASE 358 | ``` 359 | 360 | Now migrate to the latest available version 361 | ``` 362 | admin@localhost foodb $ pgmigrate -t latest migrate 363 | ``` 364 | 365 | The operations log will look like this: 366 | ``` 367 | admin@localhost foodb $ psql foodb 368 | psql (9.5.4) 369 | Type "help" for help. 370 | 371 | foodb=# SELECT * FROM ops; 372 | seq | op 373 | -----+--------------------------------------------------------------- 374 | 1 | beforeAll 00_create_database_ops.sql 375 | 2 | beforeEach 00_dummy_before_each.sql 376 | 3 | migration V0001__Initial_schema_foo.sql 377 | 4 | afterEach 00_dummy_after_each.sql 378 | 5 | beforeEach 00_dummy_before_each.sql 379 | 6 | migration V0002__Add_baz_column_to_foo.sql 380 | 7 | afterEach 00_dummy_after_each.sql 381 | 8 | afterAll 00_dummy_after_all.sql 382 | 9 | grants foo 383 | 10 | migration V0003__NONTRANSACTIONAL_Add_index_on_baz_column.sql 384 | (10 rows) 385 | ``` 386 | 387 | ## UTF-8 Migrations 388 | 389 | In most cases you should avoid non-ASCII characters in your migrations, 390 | so PGmigrate will complain about them with: 391 | ``` 392 | pgmigrate.MalformedStatement: Non ascii symbols in file 393 | ``` 394 | 395 | But sometimes there is no way to avoid a UTF-8 migration 396 | (imagine inserting some initial data into your database). 397 | You can insert a modeline in the migration file to disable 398 | the non-ASCII character check: 399 | ``` 400 | /* pgmigrate-encoding: utf-8 */ 401 | ``` 402 | 403 | ## Session setup 404 | 405 | Sometimes you need to set some session options before migrating 406 | (e.g. isolation level). This is possible with the `-s` option or `session` in the config. 407 | For example, to set the `serializable` isolation level and 408 | a lock timeout of 30 seconds one could do something like this: 409 | ``` 410 | pgmigrate -s "SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL SERIALIZABLE" \ 411 | -s "SET lock_timeout = '30s'" ... 412 | ``` 413 | This feature will not work with a connection pooler (such as `odyssey` or `pgbouncer`) 414 | in non-session mode. 415 | 416 | ## Terminating blocking pids 417 | 418 | On heavily loaded production environments running some migrations 419 | could block queries from application backends. 420 | Unfortunately, if the migration itself is blocked by some other query, this could lead 421 | to really slow database queries. 422 | For example, a lock queue like this: 423 | ``` 424 | <some long-running query> 425 | <migration waiting for the long-running query> 426 | <application queries waiting for the migration> 427 | ``` 428 | makes the database almost unavailable for at least `idle_in_transaction_timeout`. 429 | To mitigate such issues there is the `-l <interval>` option in pgmigrate, 430 | which starts a separate thread running `pg_terminate_backend(pid)` for 431 | each pid blocking any of the pgmigrate connection pids every `interval` seconds. 432 | Of course, pgmigrate should be able to terminate other pids, so the migration user 433 | should be the application user or have the `pg_signal_backend` grant. To terminate 434 | superuser (e.g. `postgres`) pids one could run pgmigrate as a superuser. 435 | Session setup should not manipulate the `application_name` setting because 436 | conflict termination expects application names in pg_stat_activity to 437 | match internal dsn values. 438 | 439 | Note: this feature relies on the `pg_blocking_pids()` function, available since 440 | PostgreSQL 9.6. 441 | 442 | ## Session restriction 443 | 444 | In some cases you need to use several independent schemas in one database.
445 | For example, large SaaS applications tend to use such a setup for client separation. 446 | To use a non-default schema for migrations one could pass the `-m <schema>` option. 447 | By default schema restriction is enabled (it prevents access to and modification 448 | of relations outside the system schemas and the selected schema). 449 | Some restrictions are hard to implement with the current approach: 450 | support for relation drops and for nontransactional migrations. 451 | Schema restriction can be disabled with the `--disable_schema_check` option. 452 | 453 | ## Avoiding gaps in versions 454 | 455 | Sometimes the development process leads to gaps in migration versions. 456 | E.g. the current version is `N`. Alice worked on branch `a` and expects branch `b` 457 | (which Bob is working on) to be merged first. So she decides to select version 458 | `N+2` instead of `N+1` (which Bob selects). But for some reason branch `a` 459 | was merged before `b`. If we run migrations on a database without the branch `b` merge, 460 | we'll need to change the version in `b` to `N+3` or it will be skipped. 461 | One can run migrate with the `--check_serial_versions` option to avoid applying 462 | migrations with gaps in versions. 463 | -------------------------------------------------------------------------------- /features/baseline.feature: -------------------------------------------------------------------------------- 1 | Feature: Baseline 2 | 3 | Scenario: Setting baseline leaves only one migration 4 | Given migration dir 5 | And migrations 6 | | file | code | 7 | | V1__Single_migration.sql | SELECT 1; | 8 | | V2__Another_migration.sql | SELECT 1; | 9 | And database and connection 10 | And successful pgmigrate run with "-t 2 migrate" 11 | When we run pgmigrate with "-b 3 baseline" 12 | Then pgmigrate command "succeeded" 13 | And database contains schema_version 14 | And migration info contains forced baseline=3 15 | 16 | Scenario: Setting baseline on noninitialized database 17 | Given migration dir 18 | And database and connection 19 | When we run pgmigrate with "-b 1 baseline" 20 | Then pgmigrate command "succeeded" 21 | And database contains schema_version 22 | And migration info contains forced baseline=1 23 | -------------------------------------------------------------------------------- /features/clean.feature: -------------------------------------------------------------------------------- 1 | Feature: Clean 2 | 3 | Scenario: Cleaning database makes it uninitialized 4 | Given migration dir 5 | And migrations 6 | | file | code | 7 | | V1__Single_migration.sql | SELECT 1; | 8 | And database and connection 9 | And successful pgmigrate run with "-t 1 migrate" 10 | When we run pgmigrate with "clean" 11 | Then pgmigrate command "succeeded" 12 | And database has no schema_version table 13 | -------------------------------------------------------------------------------- /features/config.feature: -------------------------------------------------------------------------------- 1 | Feature: Getting info from config 2 | 3 | Scenario: Empty config 4 | Given migration dir 5 | And migrations 6 | | file | code | 7 | | V1__Single_migration.sql | SELECT 1; | 8 | And empty config 9 | And database and connection 10 | When we run pgmigrate with "-t 1 migrate" 11 | Then pgmigrate command "succeeded" 12 | 13 | Scenario: Empty callbacks in config 14 | Given migration dir 15 | And migrations 16 | | file | code | 17 | | V1__Single_migration.sql | SELECT 1; | 18 | And config 19 | """ 20 | callbacks: 21 | """ 22 | And database and connection 23 | When we run
pgmigrate with "-t 1 migrate" 24 | Then pgmigrate command "succeeded" 25 | 26 | Scenario: Empty callbacks lists in config 27 | Given migration dir 28 | And migrations 29 | | file | code | 30 | | V1__Single_migration.sql | SELECT 1; | 31 | And config 32 | """ 33 | callbacks: 34 | beforeAll: 35 | beforeEach: 36 | afterEach: 37 | afterAll: 38 | """ 39 | And database and connection 40 | When we run pgmigrate with "-t 1 migrate" 41 | Then pgmigrate command "succeeded" 42 | 43 | Scenario: Empty callbacks lists in args 44 | Given migration dir 45 | And migrations 46 | | file | code | 47 | | V1__Single_migration.sql | SELECT 1; | 48 | And database and connection 49 | When we run pgmigrate with "-a ,,,, -t 1 migrate" 50 | Then pgmigrate command "succeeded" 51 | 52 | Scenario: Callbacks from config are executed in correct order 53 | Given migration dir 54 | And migrations 55 | | file | code | 56 | | V1__Single_migration.sql | INSERT INTO mycooltable (op) values ('Migration 1'); | 57 | | V2__Another_migration.sql | INSERT INTO mycooltable (op) values ('Migration 2'); | 58 | And config callbacks 59 | | type | file | code | 60 | | beforeAll | before_all.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); | 61 | | beforeEach | before_each.sql | INSERT INTO mycooltable (op) values ('Before each'); | 62 | | afterEach | after_each.sql | INSERT INTO mycooltable (op) values ('After each'); | 63 | | afterAll | after_all.sql | INSERT INTO mycooltable (op) values ('After all'); | 64 | And database and connection 65 | When we run pgmigrate with "-t 2 migrate" 66 | Then pgmigrate command "succeeded" 67 | And database contains schema_version 68 | And query "SELECT * from mycooltable order by seq;" equals 69 | | seq | op | 70 | | 1 | Before each | 71 | | 2 | Migration 1 | 72 | | 3 | After each | 73 | | 4 | Before each | 74 | | 5 | Migration 2 | 75 | | 6 | After each | 76 | | 7 | After all | 77 | 78 | Scenario: Reordering setting schema version and afterEach callback works 79 | Given migration dir 80 | And migrations 81 | | file | code | 82 | | V1__Single_migration.sql | SELECT 1; | 83 | | V2__Another_migration.sql | SELECT 1; | 84 | And config 85 | """ 86 | set_version_info_after_callbacks: true 87 | """ 88 | And config callbacks 89 | | type | file | code | 90 | | beforeAll | before_all.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, count int); | 91 | | afterEach | after_each.sql | INSERT INTO mycooltable (count) SELECT count(*) FROM schema_version; | 92 | And database and connection 93 | When we run pgmigrate with "-t 2 migrate" 94 | Then pgmigrate command "succeeded" 95 | And database contains schema_version 96 | And query "SELECT * from mycooltable order by seq;" equals 97 | | seq | count | 98 | | 1 | 0 | 99 | | 2 | 1 | 100 | 101 | Scenario: Callbacks from config are executed from dir 102 | Given migration dir 103 | And migrations 104 | | file | code | 105 | | V1__Single_migration.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); | 106 | And config callbacks 107 | | type | dir | file | code | 108 | | afterAll | after_all | callback.sql | INSERT INTO mycooltable (op) values ('After all'); | 109 | And database and connection 110 | When we run pgmigrate with "-t 2 migrate" 111 | Then pgmigrate command "succeeded" 112 | And database contains schema_version 113 | And query "SELECT * from mycooltable order by seq;" equals 114 | | seq | op | 115 | | 1 | After all | 116 | 117 | Scenario: Callbacks from config are overridden by args 118 | Given migration dir 119 | And migrations 120 
| | file | code | 121 | | V1__Single_migration.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); | 122 | And config callbacks 123 | | type | file | code | 124 | | INVALID | callback.sql | SELECT 1; | 125 | And callbacks 126 | | type | file | code | 127 | | afterAll | after_all.sql | INSERT INTO mycooltable (op) values ('After all'); | 128 | And database and connection 129 | When we run pgmigrate with our callbacks and "-t 2 migrate" 130 | Then pgmigrate command "succeeded" 131 | And database contains schema_version 132 | And query "SELECT * from mycooltable order by seq;" equals 133 | | seq | op | 134 | | 1 | After all | 135 | 136 | Scenario: User from config is saved in migration metadata 137 | Given migration dir 138 | And migrations 139 | | file | code | 140 | | V1__Single_migration.sql | SELECT 1; | 141 | And database and connection 142 | When we run pgmigrate with "-u test_user -t 1 migrate" 143 | Then pgmigrate command "succeeded" 144 | And database contains schema_version 145 | And query "SELECT version, installed_by from schema_version;" equals 146 | | version | installed_by | 147 | | 1 | test_user | 148 | 149 | -------------------------------------------------------------------------------- /features/conflicting_pids.feature: -------------------------------------------------------------------------------- 1 | Feature: Conflicting pids termination 2 | 3 | Scenario: Transactional migration blocked by update passes 4 | Given migration dir 5 | And migrations 6 | | file | code | 7 | | V1__Create_test_table.sql | CREATE TABLE test (id bigint); | 8 | | V2__Insert_test_data.sql | INSERT INTO test (id) VALUES (1); | 9 | | V3__Alter_test_table.sql | ALTER TABLE test ADD COLUMN test text; | 10 | And database and connection 11 | And successful pgmigrate run with "-t 2 migrate" 12 | And not committed query "UPDATE test SET id = 2 WHERE id = 1" 13 | When we run pgmigrate with "-l 0.1 -t 3 migrate" 14 | Then pgmigrate command "succeeded" 15 | 16 | Scenario: Nontransactional migration blocked by update passes 17 | Given migration dir 18 | And migrations 19 | | file | code | 20 | | V1__Create_test_table.sql | CREATE TABLE test (id bigint); | 21 | | V2__Insert_test_data.sql | INSERT INTO test (id) VALUES (1); | 22 | | V3__NONTRANSACTIONAL_migration.sql | ALTER TABLE test ADD COLUMN test text; | 23 | And database and connection 24 | And successful pgmigrate run with "-t 2 migrate" 25 | And not committed query "UPDATE test SET id = 2 WHERE id = 1" 26 | When we run pgmigrate with "-l 0.1 -t 3 migrate" 27 | Then pgmigrate command "succeeded" 28 | 29 | Scenario: Mixed transactional and nontransactional migrations blocked by update pass 30 | Given migration dir 31 | And migrations 32 | | file | code | 33 | | V1__Transactional_migration.sql | ALTER TABLE test ADD COLUMN test text; | 34 | | V2__NONTRANSACTIONAL_migration.sql | ALTER TABLE test ADD COLUMN test2 text; | 35 | And database and connection 36 | And query "CREATE TABLE test (id bigint)" 37 | And query "INSERT INTO test (id) VALUES (1)" 38 | And not committed query "UPDATE test SET id = 2 WHERE id = 1" 39 | When we run pgmigrate with "-l 0.1 -t 2 migrate" 40 | Then pgmigrate command "succeeded" 41 | -------------------------------------------------------------------------------- /features/dryrun.feature: -------------------------------------------------------------------------------- 1 | Feature: Dryrun 2 | 3 | Scenario: One migration in dir applies after migrate command 4 | Given migration dir 5 | And migrations 6 | | file | code 
| 7 | | V1__Single_migration.sql | SELECT 1; | 8 | And database and connection 9 | When we run pgmigrate with "-n -t 1 migrate" 10 | Then pgmigrate command "succeeded" 11 | And database has no schema_version table 12 | -------------------------------------------------------------------------------- /features/empty_is_uninitialized.feature: -------------------------------------------------------------------------------- 1 | Feature: Empty database database has no schema_version table 2 | 3 | Scenario: Check uninitialized 4 | Given database and connection 5 | Then database has no schema_version table 6 | -------------------------------------------------------------------------------- /features/environment.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | 3 | 4 | def before_scenario(context, scenario): 5 | try: 6 | context.last_migrate_res = {} 7 | context.callbacks = [] 8 | context.migrate_config = {} 9 | shutil.rmtree(context.migr_dir) 10 | except Exception: 11 | pass 12 | 13 | 14 | def after_all(context): 15 | try: 16 | context.last_migrate_res = {} 17 | context.callbacks = [] 18 | context.migrate_config = {} 19 | shutil.rmtree(context.migr_dir) 20 | except Exception: 21 | pass 22 | -------------------------------------------------------------------------------- /features/errors_handle.feature: -------------------------------------------------------------------------------- 1 | Feature: Handling migration errors 2 | 3 | Scenario: Conflicting migration versions 4 | Given migration dir 5 | And migrations 6 | | file | code | 7 | | V1__Single_migration.sql | SELECT 1; | 8 | | V1__Another_migration.sql | SELECT 1; | 9 | Then versions conflict with version=1 10 | 11 | Scenario: Migration with bad sql 12 | Given migration dir 13 | And migrations 14 | | file | code | 15 | | V1__Single_migration.sql | THIS_IS_ERROR | 16 | And database and connection 17 | When we run pgmigrate with "-t 1 migrate" 18 | Then pgmigrate command "failed" 19 | And database has no schema_version table 20 | And migrate command failed with THIS_IS_ERROR 21 | 22 | Scenario: Migration without target 23 | Given migration dir 24 | And database and connection 25 | When we run pgmigrate with "migrate" 26 | Then pgmigrate command "failed" 27 | And database has no schema_version table 28 | And migrate command failed with Unknown target 29 | 30 | Scenario: Missing migrations subdir 31 | Given migration dir 32 | And removed migrations subdir 33 | When we run pgmigrate with "-t 1 migrate" 34 | Then migrate command failed with Migrations dir not found 35 | 36 | Scenario: Wrong schema_version structure 37 | Given migration dir 38 | And database and connection 39 | And query "CREATE TABLE public.schema_version (bla text, blabla text);" 40 | When we run pgmigrate with "-t 1 migrate" 41 | Then pgmigrate command "failed" 42 | And migrate command failed with unexpected structure 43 | 44 | Scenario: Migration with non-ascii symbols 45 | Given migration dir 46 | And migrations 47 | | file | code | 48 | | V1__Single_migration.sql | テスト | 49 | And database and connection 50 | When we run pgmigrate with "-t 1 migrate" 51 | Then pgmigrate command "failed" 52 | And database has no schema_version table 53 | And migrate command failed with Non ascii symbols in file 54 | 55 | Scenario: Mix of transactional and nontransactional migrations 56 | Given migration dir 57 | And migrations 58 | | file | code | 59 | | V1__Transactional_migration.sql | SELECT 1; | 60 | | V2__NONTRANSACTIONAL_migration.sql 
| SELECT 1; | 61 | | V3__Transactional_migration.sql | SELECT 1; | 62 | And database and connection 63 | And successful pgmigrate run with "-t 1 migrate" 64 | When we run pgmigrate with "-t 3 migrate" 65 | Then pgmigrate command "failed" 66 | And database contains schema_version 67 | And migrate command failed with Unable to mix 68 | 69 | Scenario: Empty user name 70 | Given migration dir 71 | And migrations 72 | | file | code | 73 | | V1__Single_migration.sql | SELECT 1; | 74 | And database and connection 75 | When we run pgmigrate with "-u -t 1 migrate" 76 | Then pgmigrate command "failed" 77 | And database has no schema_version table 78 | And migrate command failed with Empty user name 79 | 80 | Scenario: Baseline on applied version 81 | Given migration dir 82 | And migrations 83 | | file | code | 84 | | V1__Single_migration.sql | SELECT 1; | 85 | And database and connection 86 | And successful pgmigrate run with "-t 1 migrate" 87 | When we run pgmigrate with "-b 1 baseline" 88 | Then pgmigrate command "failed" 89 | And database contains schema_version 90 | And migrate command failed with already applied 91 | 92 | Scenario: Invalid callback types 93 | Given migration dir 94 | And database and connection 95 | When we run pgmigrate with "-a INVALID -t 1 migrate" 96 | Then pgmigrate command "failed" 97 | And database has no schema_version table 98 | And migrate command failed with Unexpected callback type 99 | 100 | Scenario: Invalid callback types from config 101 | Given migration dir 102 | And database and connection 103 | And config callbacks 104 | | type | file | code | 105 | | INVALID | callback.sql | SELECT 1; | 106 | When we run pgmigrate with "-t 1 migrate" 107 | Then pgmigrate command "failed" 108 | And database has no schema_version table 109 | And migrate command failed with Unexpected callback type 110 | 111 | Scenario: Missing callback files 112 | Given migration dir 113 | And database and connection 114 | When we run pgmigrate with "-a afterAll:missing.sql -t 1 migrate" 115 | Then pgmigrate command "failed" 116 | And database has no schema_version table 117 | And migrate command failed with Path unavailable 118 | 119 | Scenario: Invalid callback types from config 120 | Given migration dir 121 | And database and connection 122 | And config callbacks 123 | | type | file | 124 | | afterAll | callback.sql | 125 | When we run pgmigrate with "-t 1 migrate" 126 | Then pgmigrate command "failed" 127 | And database has no schema_version table 128 | And migrate command failed with Path unavailable 129 | 130 | Scenario: Dry run for nontransactional migrations 131 | Given migration dir 132 | And migrations 133 | | file | code | 134 | | V1__Transactional_migration.sql | SELECT 1; | 135 | | V2__NONTRANSACTIONAL_migration.sql | SELECT 1; | 136 | And database and connection 137 | And successful pgmigrate run with "-t 1 migrate" 138 | When we run pgmigrate with "-n -t 2 migrate" 139 | Then pgmigrate command "failed" 140 | And database contains schema_version 141 | And migrate command failed with is nonsense 142 | 143 | Scenario: Nontransactional migration on empty database 144 | Given migration dir 145 | And migrations 146 | | file | code | 147 | | V1__NONTRANSACTIONAL_migration.sql | SELECT 1; | 148 | And database and connection 149 | When we run pgmigrate with "-t 1 migrate" 150 | Then pgmigrate command "failed" 151 | And migrate command failed with First migration MUST be transactional 152 | And database has no schema_version table 153 | 154 | Scenario: Version gaps 155 | Given migration 
dir 156 | And migrations 157 | | file | code | 158 | | V2__migration1.sql | SELECT 1; | 159 | | V5__migration2.sql | SELECT 1; | 160 | And database and connection 161 | When we run pgmigrate with "--check_serial_versions -t 5 migrate" 162 | Then pgmigrate command "failed" 163 | And migrate command failed with missing versions 3, 4 164 | And database has no schema_version table 165 | 166 | Scenario: Version gaps with applied migration 167 | Given migration dir 168 | And migrations 169 | | file | code | 170 | | V2__migration1.sql | SELECT 1; | 171 | | V5__migration2.sql | SELECT 1; | 172 | And database and connection 173 | And successful pgmigrate run with "--check_serial_versions -t 2 migrate" 174 | When we run pgmigrate with "--check_serial_versions -t 5 migrate" 175 | Then pgmigrate command "failed" 176 | And migrate command failed with missing versions 3, 4 177 | -------------------------------------------------------------------------------- /features/info.feature: -------------------------------------------------------------------------------- 1 | Feature: Info 2 | 3 | Scenario: Info prints applied migration 4 | Given migration dir 5 | And migrations 6 | | file | code | 7 | | V1__Single_migration.sql | SELECT 1; | 8 | And database and connection 9 | And successful pgmigrate run with "-t 1 migrate" 10 | When we run pgmigrate with "info" 11 | Then migrate command passed with Single migration 12 | 13 | Scenario: Info filters out applied migrations 14 | Given migration dir 15 | And migrations 16 | | file | code | 17 | | V1__Applied_migration.sql | SELECT 1; | 18 | | V2__Unapplied_migration.sql | SELECT 1; | 19 | And database and connection 20 | And successful pgmigrate run with "-t 1 migrate" 21 | When we run pgmigrate with "-o info" 22 | Then migrate command output matches json 23 | """ 24 | { 25 | "2": { 26 | "version": 2, 27 | "type": "auto", 28 | "installed_by": null, 29 | "installed_on": null, 30 | "description": "Unapplied migration", 31 | "transactional": true 32 | } 33 | } 34 | """ 35 | -------------------------------------------------------------------------------- /features/migrations_from_dir.feature: -------------------------------------------------------------------------------- 1 | Feature: Getting migrations from dir 2 | 3 | Scenario: Empty dir gives empty migrations list 4 | Given migration dir 5 | Then migration list is empty 6 | 7 | Scenario: One migration in dir gives migration list with only this migration 8 | Given migration dir 9 | And migrations 10 | | file | code | 11 | | V1__Single_migration.sql | SELECT 1; | 12 | Then migration list equals single transactional migration 13 | 14 | Scenario: Garbage migrations are properly ignored 15 | Given migration dir 16 | And migrations 17 | | file | code | 18 | | V1__Single_migration.sql | SELECT 1; | 19 | | kekekeke.sql | SELECT 1; | 20 | And migration dir "V2__Dir_migration.sql" 21 | Then migration list equals single transactional migration 22 | 23 | Scenario: One migration in dir applies after migrate command 24 | Given migration dir 25 | And migrations 26 | | file | code | 27 | | V1__Single_migration.sql | SELECT 1; | 28 | And database and connection 29 | When we run pgmigrate with "-t 1 migrate" 30 | Then pgmigrate command "succeeded" 31 | And database contains schema_version 32 | And migration info contains single migration 33 | 34 | Scenario: Migration with comment correctly applied 35 | Given migration dir 36 | And migrations 37 | | file | code | 38 | | V1__Single_migration.sql | SELECT 1;\n\n-- Comment\n-- Comment 
2\n| 39 | And database and connection 40 | When we run pgmigrate with "-t 1 migrate" 41 | Then pgmigrate command "succeeded" 42 | And database contains schema_version 43 | And migration info contains single migration 44 | 45 | Scenario: Python format is ignored in migration text 46 | Given migration dir 47 | And migrations 48 | | file | code | 49 | | V1__Single_migration.sql | SELECT '%02d'; | 50 | And database and connection 51 | When we run pgmigrate with "-t 1 migrate" 52 | Then pgmigrate command "succeeded" 53 | And database contains schema_version 54 | And migration info contains single migration 55 | 56 | Scenario: 'latest' target migrates to latest version 57 | Given migration dir 58 | And migrations 59 | | file | code | 60 | | V1__Single_migration.sql | INSERT INTO mycooltable (op) values ('Migration 1'); | 61 | | V2__Another_migration.sql | INSERT INTO mycooltable (op) values ('Migration 2'); | 62 | And callbacks 63 | | type | file | code | 64 | | beforeAll | before_all.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); | 65 | And database and connection 66 | When we run pgmigrate with our callbacks and "-t latest migrate" 67 | Then pgmigrate command "succeeded" 68 | And database contains schema_version 69 | And query "SELECT * from mycooltable order by seq;" equals 70 | | seq | op | 71 | | 1 | Migration 1 | 72 | | 2 | Migration 2 | 73 | 74 | Scenario: Callbacks are executed in correct order 75 | Given migration dir 76 | And migrations 77 | | file | code | 78 | | V1__Single_migration.sql | INSERT INTO mycooltable (op) values ('Migration 1'); | 79 | | V2__Another_migration.sql | INSERT INTO mycooltable (op) values ('Migration 2'); | 80 | And callbacks 81 | | type | file | code | 82 | | beforeAll | before_all.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); | 83 | | beforeEach | before_each.sql | INSERT INTO mycooltable (op) values ('Before each'); | 84 | | afterEach | after_each.sql | INSERT INTO mycooltable (op) values ('After each'); | 85 | | afterAll | after_all.sql | INSERT INTO mycooltable (op) values ('After all'); | 86 | And database and connection 87 | When we run pgmigrate with our callbacks and "-t 2 migrate" 88 | Then pgmigrate command "succeeded" 89 | And database contains schema_version 90 | And query "SELECT * from mycooltable order by seq;" equals 91 | | seq | op | 92 | | 1 | Before each | 93 | | 2 | Migration 1 | 94 | | 3 | After each | 95 | | 4 | Before each | 96 | | 5 | Migration 2 | 97 | | 6 | After each | 98 | | 7 | After all | 99 | 100 | Scenario: Callbacks are executed from dir 101 | Given migration dir 102 | And migrations 103 | | file | code | 104 | | V1__Single_migration.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); | 105 | And callbacks 106 | | type | file | code | 107 | | afterAll | after_all.sql | INSERT INTO mycooltable (op) values ('After all'); | 108 | And database and connection 109 | When we run pgmigrate with dir callbacks and type "afterAll" and "-t 2 migrate" 110 | Then pgmigrate command "succeeded" 111 | And database contains schema_version 112 | And query "SELECT * from mycooltable order by seq;" equals 113 | | seq | op | 114 | | 1 | After all | 115 | -------------------------------------------------------------------------------- /features/modeline.feature: -------------------------------------------------------------------------------- 1 | Feature: Modelines in migration/callback files 2 | 3 | Scenario: Migration with non-ascii symbols and modeline 4 | Given migration dir 5 | And migrations 
6 | | file | code | 7 | | V1__Single_migration.sql | /* pgmigrate-encoding: utf-8 */SELECT 'テスト'; | 8 | And database and connection 9 | When we run pgmigrate with "-t 1 migrate" 10 | Then pgmigrate command "succeeded" 11 | And database contains schema_version 12 | -------------------------------------------------------------------------------- /features/nontransactional_migrations.feature: -------------------------------------------------------------------------------- 1 | Feature: Nontransactional migrations support 2 | 3 | Scenario: Callbacks are not executed on nontransactional migration 4 | Given migration dir 5 | And migrations 6 | | file | code | 7 | | V1__Transactional_migration.sql | INSERT INTO mycooltable (op) values ('Migration 1'); | 8 | | V2__NONTRANSACTIONAL_migration.sql | INSERT INTO mycooltable (op) values ('Migration 2'); | 9 | And callbacks 10 | | type | file | code | 11 | | beforeAll | before_all.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); | 12 | | beforeEach | before_each.sql | INSERT INTO mycooltable (op) values ('Before each'); | 13 | | afterEach | after_each.sql | INSERT INTO mycooltable (op) values ('After each'); | 14 | | afterAll | after_all.sql | INSERT INTO mycooltable (op) values ('After all'); | 15 | And database and connection 16 | And successful pgmigrate run with our callbacks and "-t 1 migrate" 17 | When we run pgmigrate with our callbacks and "-t 2 migrate" 18 | Then pgmigrate command "succeeded" 19 | And database contains schema_version 20 | And query "SELECT * from mycooltable order by seq;" equals 21 | | seq | op | 22 | | 1 | Before each | 23 | | 2 | Migration 1 | 24 | | 3 | After each | 25 | | 4 | After all | 26 | | 5 | Migration 2 | 27 | 28 | Scenario: Callbacks are executed on nontransactional migration on empty database in correct order 1 29 | Given migration dir 30 | And migrations 31 | | file | code | 32 | | V1__Transactional_migration.sql | INSERT INTO mycooltable (op) values ('Migration 1'); | 33 | | V2__NONTRANSACTIONAL_migration.sql | INSERT INTO mycooltable (op) values ('Migration 2'); | 34 | And callbacks 35 | | type | file | code | 36 | | beforeAll | before_all.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); | 37 | | beforeEach | before_each.sql | INSERT INTO mycooltable (op) values ('Before each'); | 38 | | afterEach | after_each.sql | INSERT INTO mycooltable (op) values ('After each'); | 39 | | afterAll | after_all.sql | INSERT INTO mycooltable (op) values ('After all'); | 40 | And database and connection 41 | When we run pgmigrate with our callbacks and "-t 2 migrate" 42 | Then pgmigrate command "succeeded" 43 | And migrate command passed with Migrating to version 2 44 | And database contains schema_version 45 | And query "SELECT * from mycooltable order by seq;" equals 46 | | seq | op | 47 | | 1 | Before each | 48 | | 2 | Migration 1 | 49 | | 3 | After each | 50 | | 4 | After all | 51 | | 5 | Migration 2 | 52 | 53 | Scenario: Callbacks are executed on nontransactional migration on empty database in correct order 2 54 | Given migration dir 55 | And migrations 56 | | file | code | 57 | | V1__Transactional_migration.sql | INSERT INTO mycooltable (op) values ('Migration 1'); | 58 | | V2__NONTRANSACTIONAL_migration.sql | INSERT INTO mycooltable (op) values ('Migration 2'); | 59 | | V3__Transactional_migration.sql | INSERT INTO mycooltable (op) values ('Migration 3'); | 60 | And callbacks 61 | | type | file | code | 62 | | beforeAll | before_all.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, 
op TEXT); | 63 | | beforeEach | before_each.sql | INSERT INTO mycooltable (op) values ('Before each'); | 64 | | afterEach | after_each.sql | INSERT INTO mycooltable (op) values ('After each'); | 65 | | afterAll | after_all.sql | INSERT INTO mycooltable (op) values ('After all'); | 66 | And database and connection 67 | When we run pgmigrate with our callbacks and "-t 3 migrate" 68 | Then pgmigrate command "succeeded" 69 | And migrate command passed with Migrating to version 3 70 | And database contains schema_version 71 | And query "SELECT * from mycooltable order by seq;" equals 72 | | seq | op | 73 | | 1 | Before each | 74 | | 2 | Migration 1 | 75 | | 3 | After each | 76 | | 4 | Migration 2 | 77 | | 5 | Before each | 78 | | 6 | Migration 3 | 79 | | 7 | After each | 80 | | 8 | After all | 81 | 82 | Scenario: Callbacks are executed on nontransactional migration on empty database in correct order 3 83 | Given migration dir 84 | And migrations 85 | | file | code | 86 | | V1__Transactional_migration.sql | INSERT INTO mycooltable (op) values ('Migration 1'); | 87 | | V2__NONTRANSACTIONAL_migration.sql | INSERT INTO mycooltable (op) values ('Migration 2'); | 88 | | V3__Transactional_migration.sql | INSERT INTO mycooltable (op) values ('Migration 3'); | 89 | | V4__NONTRANSACTIONAL_migration.sql | INSERT INTO mycooltable (op) values ('Migration 4'); | 90 | | V5__NONTRANSACTIONAL_migration.sql | INSERT INTO mycooltable (op) values ('Migration 5'); | 91 | And callbacks 92 | | type | file | code | 93 | | beforeAll | before_all.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); | 94 | | beforeEach | before_each.sql | INSERT INTO mycooltable (op) values ('Before each'); | 95 | | afterEach | after_each.sql | INSERT INTO mycooltable (op) values ('After each'); | 96 | | afterAll | after_all.sql | INSERT INTO mycooltable (op) values ('After all'); | 97 | And database and connection 98 | When we run pgmigrate with our callbacks and "-t latest migrate" 99 | Then pgmigrate command "succeeded" 100 | And migrate command passed with Migrating to version 5 101 | And database contains schema_version 102 | And query "SELECT * from mycooltable order by seq;" equals 103 | | seq | op | 104 | | 1 | Before each | 105 | | 2 | Migration 1 | 106 | | 3 | After each | 107 | | 4 | Migration 2 | 108 | | 5 | Before each | 109 | | 6 | Migration 3 | 110 | | 7 | After each | 111 | | 8 | After all | 112 | | 9 | Migration 4 | 113 | | 10 | Migration 5 | 114 | -------------------------------------------------------------------------------- /features/schema.feature: -------------------------------------------------------------------------------- 1 | Feature: Schema restriction 2 | 3 | Scenario: Transactional migration restricted to schema pass 4 | Given migration dir 5 | And migrations 6 | | file | code | 7 | | V1__Single_migration.sql | CREATE TABLE "test-schema".test (id bigint); | 8 | And database and connection 9 | And successful pgmigrate run with "-t 1 -m test-schema migrate" 10 | Then database contains schema_version in schema "test-schema" 11 | And migration info contains single migration in schema "test-schema" 12 | 13 | Scenario: Nontransactional migration restricted to schema fails 14 | Given migration dir 15 | And migrations 16 | | file | code | 17 | | V1__NONTRANSACTIONAL_migration.sql | CREATE TABLE "test-schema".test (id bigint); | 18 | And database and connection 19 | When we run pgmigrate with "-t 1 -m test-schema migrate" 20 | Then migrate command failed with Schema check is not available for 
nontransactional migrations 21 | 22 | Scenario: Nontransactional migration with disabled schema restriction passes 23 | Given migration dir 24 | And migrations 25 | | file | code | 26 | | V1__Create_test_table.sql | CREATE TABLE "test-schema".test (id bigint); | 27 | | V2__NONTRANSACTIONAL_migration.sql | INSERT INTO "test-schema".test (id) VALUES (1); | 28 | And database and connection 29 | And successful pgmigrate run with "-t 2 -m test-schema --disable_schema_check migrate" 30 | Then database contains schema_version in schema "test-schema" 31 | 32 | Scenario: Transactional migration with restriction violation fails 33 | Given migration dir 34 | And migrations 35 | | file | code | 36 | | V1__Create_public_table.sql | CREATE TABLE public.test (id bigint); | 37 | And database and connection 38 | When we run pgmigrate with "-t 1 -m test-schema migrate" 39 | Then migrate command failed with Unexpected relations used in migrations: public.test 40 | -------------------------------------------------------------------------------- /features/steps/callbacks.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from behave import given, when 4 | 5 | 6 | @given('callbacks') 7 | def step_impl(context): 8 | for row in context.table: 9 | path = os.path.join('callbacks', row['file']) 10 | context.callbacks.append(row['type'] + ':' + path) 11 | if row.get('code', False): 12 | with open(os.path.join(context.migr_dir, path), 'w') as f: 13 | f.write(row['code']) 14 | 15 | 16 | @given('config callbacks') # noqa 17 | def step_impl(context): 18 | for row in context.table: 19 | if row.get('dir', False): 20 | dir_path = os.path.join('callbacks', row['dir']) 21 | path = os.path.join(dir_path, row['file']) 22 | else: 23 | dir_path = None 24 | path = os.path.join('callbacks', row['file']) 25 | if 'callbacks' not in context.migrate_config: 26 | context.migrate_config['callbacks'] = {} 27 | if row['type'] not in context.migrate_config['callbacks']: 28 | context.migrate_config['callbacks'][row['type']] = [] 29 | if dir_path: 30 | context.migrate_config['callbacks'][row['type']].append(dir_path) 31 | if not os.path.exists(os.path.join(context.migr_dir, dir_path)): 32 | os.mkdir(os.path.join(context.migr_dir, dir_path)) 33 | else: 34 | context.migrate_config['callbacks'][row['type']].append(path) 35 | if row.get('code', False): 36 | with open(os.path.join(context.migr_dir, path), 'w') as f: 37 | f.write(row['code']) 38 | 39 | 40 | @given('successful pgmigrate run with our callbacks and "{args}"') # noqa 41 | def step_impl(context, args): 42 | cbs = ','.join(context.callbacks) 43 | context.execute_steps('given successful pgmigrate run with ' + '"%s"' % 44 | ('-a ' + cbs + ' ' + args, )) 45 | 46 | 47 | @when('we run pgmigrate with our callbacks and "{args}"') # noqa 48 | def step_impl(context, args): 49 | cbs = ','.join(context.callbacks) 50 | context.execute_steps('when we run pgmigrate with ' + '"%s"' % 51 | ('-a ' + cbs + ' ' + args, )) 52 | 53 | 54 | @when('we run pgmigrate with dir callbacks and type "{cb_type}" and "{args}"' 55 | ) # noqa 56 | def step_impl(context, cb_type, args): 57 | p_args = '-a ' + cb_type + ':' + context.migr_dir + '/callbacks/ ' + args 58 | context.execute_steps('when we run pgmigrate with "%s"' % (p_args, )) 59 | -------------------------------------------------------------------------------- /features/steps/check_if_contains_schema_version.py: -------------------------------------------------------------------------------- 1 | from 
behave import then 2 | from pgmigrate import _is_initialized 3 | 4 | 5 | @then("database contains schema_version") 6 | @then('database contains schema_version in schema "{schema}"') 7 | def step_impl(context, schema='public'): 8 | cur = context.conn.cursor() 9 | assert _is_initialized(schema, cur), 'Non-empty db should be initialized' 10 | -------------------------------------------------------------------------------- /features/steps/check_if_empty.py: -------------------------------------------------------------------------------- 1 | from behave import then 2 | from pgmigrate import _is_initialized 3 | 4 | 5 | @then("database has no schema_version table") 6 | @then('database has no schema_version table in schema "{schema}"') 7 | def step_impl(context, schema='public'): 8 | cur = context.conn.cursor() 9 | assert not _is_initialized(schema, cur), 'Database should be uninitialized' 10 | -------------------------------------------------------------------------------- /features/steps/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import yaml 4 | 5 | from behave import given 6 | 7 | 8 | @given('config') 9 | def step_impl(context): 10 | data = yaml.safe_load(context.text) 11 | context.migrate_config = data 12 | 13 | 14 | @given('empty config') # noqa 15 | def step_impl(context): 16 | open(os.path.join(context.migr_dir, 'migrations.yml'), 'w').close() 17 | -------------------------------------------------------------------------------- /features/steps/conflicting_versions.py: -------------------------------------------------------------------------------- 1 | from behave import then 2 | from pgmigrate import MalformedMigration, _get_migrations_info_from_dir 3 | 4 | 5 | @then('versions conflict with version={version}') 6 | def step_impl(context, version): 7 | try: 8 | _get_migrations_info_from_dir(context.migr_dir) 9 | except MalformedMigration as e: 10 | assert 'migrations with same version: ' + str(version) in str(e) 11 | return 12 | raise RuntimeError('No failure on version conflict') 13 | -------------------------------------------------------------------------------- /features/steps/database_and_connection.py: -------------------------------------------------------------------------------- 1 | import psycopg2 2 | 3 | from behave import given 4 | 5 | 6 | @given('database and connection') 7 | def step_impl(context): 8 | context.conn = None 9 | conn = psycopg2.connect('dbname=postgres') 10 | conn.autocommit = True 11 | cur = conn.cursor() 12 | cur.execute("select pg_terminate_backend(pid) " + 13 | "from pg_stat_activity where datname='pgmigratetest'") 14 | cur.execute('drop database if exists pgmigratetest') 15 | cur.execute('create database pgmigratetest') 16 | context.conn = psycopg2.connect('dbname=pgmigratetest') 17 | -------------------------------------------------------------------------------- /features/steps/migration.py: -------------------------------------------------------------------------------- 1 | import io 2 | import os 3 | 4 | from behave import given 5 | 6 | 7 | @given('migrations') # noqa 8 | def step_impl(context): 9 | migrations_path = os.path.join(context.migr_dir, 'migrations') 10 | for row in context.table: 11 | path = os.path.join(migrations_path, row['file']) 12 | with io.open(path, 'w', encoding='utf-8') as f: 13 | f.write(row['code'].replace('\\n', '\n')) 14 | 15 | 16 | @given('migration dir "{dirname}"') # noqa 17 | def step_impl(context, dirname): 18 | migrations_path = 
os.path.join(context.migr_dir, 'migrations') 19 | path = os.path.join(migrations_path, dirname) 20 | os.mkdir(path) 21 | -------------------------------------------------------------------------------- /features/steps/migration_dir.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | import tempfile 4 | 5 | from behave import given 6 | 7 | 8 | @given('migration dir') 9 | def step_migrations_dir(context): 10 | try: 11 | shutil.rmtree(context.migr_dir) 12 | except Exception: 13 | pass 14 | context.migr_dir = tempfile.mkdtemp() 15 | os.mkdir(os.path.join(context.migr_dir, 'migrations')) 16 | os.mkdir(os.path.join(context.migr_dir, 'callbacks')) 17 | 18 | 19 | @given('removed migrations subdir') 20 | def step_removed_subdir(context): 21 | shutil.rmtree(os.path.join(context.migr_dir, 'migrations')) 22 | -------------------------------------------------------------------------------- /features/steps/migration_failed.py: -------------------------------------------------------------------------------- 1 | from behave import then 2 | 3 | 4 | @then('migrate command failed with {error}') 5 | def step_impl(context, error): 6 | assert context.last_migrate_res['ret'] != 0, \ 7 | 'Not failed with: ' + context.last_migrate_res['err'].decode('utf-8', 8 | 'ignore') 9 | assert error in context.last_migrate_res['err'], \ 10 | 'Actual result: ' + context.last_migrate_res['err'].decode('utf-8', 11 | 'ignore') 12 | -------------------------------------------------------------------------------- /features/steps/migration_info_contains_forced_baseline.py: -------------------------------------------------------------------------------- 1 | from behave import then 2 | from pgmigrate import _get_info 3 | 4 | 5 | @then("migration info contains forced baseline={baseline}") 6 | @then( 7 | 'migration info contains forced baseline={baseline} in schema "{schema}"', 8 | ) 9 | def step_impl(context, baseline, schema='public'): 10 | cur = context.conn.cursor() 11 | info = _get_info(context.migr_dir, 0, 1, schema, cur) 12 | assert list(info.values())[0].meta['version'] == int(baseline) 13 | assert list(info.values())[0].meta['description'] == 'Forced baseline' 14 | -------------------------------------------------------------------------------- /features/steps/migration_info_contains_single_migration.py: -------------------------------------------------------------------------------- 1 | from behave import then 2 | from pgmigrate import _get_info 3 | 4 | 5 | @then("migration info contains single migration") 6 | @then('migration info contains single migration in schema "{schema}"') 7 | def step_impl(context, schema='public'): 8 | cur = context.conn.cursor() 9 | info = _get_info(context.migr_dir, 0, 1, schema, cur) 10 | assert list(info.values())[0].meta['version'] == 1 11 | assert list(info.values())[0].meta['description'] == 'Single migration' 12 | -------------------------------------------------------------------------------- /features/steps/migration_list_empty.py: -------------------------------------------------------------------------------- 1 | from behave import then 2 | from pgmigrate import _get_migrations_info_from_dir 3 | 4 | 5 | @then('migration list is empty') 6 | def step_impl(context): 7 | assert len(_get_migrations_info_from_dir(context.migr_dir).keys()) == 0 8 | -------------------------------------------------------------------------------- /features/steps/migration_list_equals_single_transactional_migration.py: 
-------------------------------------------------------------------------------- 1 | from behave import then 2 | from pgmigrate import _get_migrations_info_from_dir 3 | 4 | 5 | @then('migration list equals single transactional migration') 6 | def step_impl(context): 7 | assert len(_get_migrations_info_from_dir(context.migr_dir).keys()) == 1 8 | migration = list(_get_migrations_info_from_dir( 9 | context.migr_dir).values())[0] 10 | assert migration.meta['version'] == 1 11 | assert migration.meta['description'] == 'Single migration' 12 | -------------------------------------------------------------------------------- /features/steps/migration_passed.py: -------------------------------------------------------------------------------- 1 | from behave import then 2 | 3 | 4 | @then('migrate command passed with {message}') 5 | def step_impl(context, message): 6 | assert context.last_migrate_res['ret'] == 0, \ 7 | 'Failed with: ' + context.last_migrate_res['err'] 8 | assert message in context.last_migrate_res['err'], \ 9 | 'Actual result: ' + context.last_migrate_res['err'] 10 | -------------------------------------------------------------------------------- /features/steps/pgmigrate_output.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from behave import then 4 | 5 | 6 | @then('migrate command output matches json') # noqa 7 | def step_impl(context): 8 | ref_data = json.loads(context.text) 9 | out_data = json.loads(context.last_migrate_res['out']) 10 | assert json.dumps(ref_data) == json.dumps(out_data), \ 11 | 'Actual result: ' + context.last_migrate_res['out'] 12 | -------------------------------------------------------------------------------- /features/steps/query.py: -------------------------------------------------------------------------------- 1 | from behave import given, then 2 | 3 | 4 | @given('not committed query "{query}"') # noqa 5 | def step_impl(context, query): 6 | cur = context.conn.cursor() 7 | cur.execute(query) 8 | 9 | 10 | @given('query "{query}"') # noqa 11 | def step_impl(context, query): 12 | cur = context.conn.cursor() 13 | cur.execute(query) 14 | cur.execute('commit;') 15 | 16 | 17 | @then('query "{query}" equals') # noqa 18 | def step_impl(context, query): 19 | cur = context.conn.cursor() 20 | cur.execute(query) 21 | r = cur.fetchall() 22 | formatted = ';'.join(map(lambda x: '|'.join(map(str, x)), r)) 23 | res = [] 24 | for row in context.table: 25 | res.append(row[0] + '|' + row[1]) 26 | result = ';'.join(res) 27 | assert formatted == result, 'Unexpected result: ' + formatted 28 | -------------------------------------------------------------------------------- /features/steps/run_pgmigrate.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | import sys 4 | 5 | import yaml 6 | from func_timeout import FunctionTimedOut, func_timeout 7 | 8 | from behave import given, then, when 9 | 10 | 11 | def run_pgmigrate(migr_dir, args): 12 | cmd = [ 13 | 'coverage', 'run', '-p', '--include=pgmigrate.py', './pgmigrate.py', 14 | '-vvv', '-d', migr_dir, '-c', 'dbname=pgmigratetest' 15 | ] + str(args).split(' ') 16 | 17 | p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 18 | 19 | try: 20 | stdout, stderr = func_timeout(5, p.communicate) 21 | except FunctionTimedOut: 22 | p.terminate() 23 | stdout, stderr = p.communicate() 24 | if not isinstance(stdout, str): 25 | stdout = stdout.decode('utf-8') 26 | if not isinstance(stderr, str): 
27 | stderr = stderr.decode('utf-8') 28 | return p.returncode, stdout, stderr 29 | 30 | 31 | @given('successful pgmigrate run with "{args}"') 32 | def step_impl(context, args): 33 | if context.migrate_config: 34 | with open(os.path.join(context.migr_dir, 'migrations.yml'), 'w') as f: 35 | yaml.safe_dump(context.migrate_config, 36 | f, 37 | encoding=None, 38 | default_flow_style=False, 39 | allow_unicode=True) 40 | res = run_pgmigrate(context.migr_dir, args) 41 | 42 | if res[0] != 0: 43 | sys.stdout.write(res[1]) 44 | sys.stderr.write(res[2]) 45 | raise Exception('Expected success got retcode=%d' % res[0]) 46 | 47 | 48 | @when('we run pgmigrate with "{args}"') # noqa 49 | def step_impl(context, args): 50 | if context.migrate_config: 51 | with open(os.path.join(context.migr_dir, 'migrations.yml'), 'w') as f: 52 | yaml.safe_dump(context.migrate_config, 53 | f, 54 | encoding=None, 55 | default_flow_style=False, 56 | allow_unicode=True) 57 | res = run_pgmigrate(context.migr_dir, args) 58 | 59 | context.last_migrate_res = {'ret': res[0], 'out': res[1], 'err': res[2]} 60 | 61 | 62 | @then('pgmigrate command "{result}"') # noqa 63 | def step_impl(context, result): 64 | if not context.last_migrate_res: 65 | raise Exception('No pgmigrate run detected in current context') 66 | 67 | if result == 'failed' and context.last_migrate_res['ret'] == 0: 68 | sys.stdout.write(str(context.last_migrate_res['out'])) 69 | sys.stderr.write(str(context.last_migrate_res['err'])) 70 | raise Exception('Expected failure got success') 71 | elif result == 'succeeded' and context.last_migrate_res['ret'] != 0: 72 | sys.stdout.write(str(context.last_migrate_res['out'])) 73 | sys.stderr.write(str(context.last_migrate_res['err'])) 74 | raise Exception('Expected success got retcode=' 75 | '%d' % context.last_migrate_res['ret']) 76 | elif result not in ['failed', 'succeeded']: 77 | raise Exception('Incorrect step arguments') 78 | -------------------------------------------------------------------------------- /pgmigrate.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | PGmigrate - PostgreSQL migrations made easy 4 | """ 5 | # -*- coding: utf-8 -*- 6 | # 7 | # Copyright (c) 2016-2025 Yandex LLC 8 | # Copyright (c) 2016-2025 Other contributors as noted in the AUTHORS file. 9 | # 10 | # Permission to use, copy, modify, and distribute this software and its 11 | # documentation for any purpose, without fee, and without a written 12 | # agreement is hereby granted, provided that the above copyright notice 13 | # and this paragraph and the following two paragraphs appear in all copies. 14 | # 15 | # IN NO EVENT SHALL YANDEX LLC BE LIABLE TO ANY PARTY FOR DIRECT, 16 | # INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST 17 | # PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, 18 | # EVEN IF YANDEX LLC HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 19 | # 20 | # YANDEX SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A 22 | # PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS" 23 | # BASIS, AND YANDEX LLC HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE, 24 | # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
25 | 26 | from __future__ import absolute_import, print_function, unicode_literals 27 | 28 | import argparse 29 | import codecs 30 | import json 31 | import logging 32 | import os 33 | import re 34 | import sys 35 | import threading 36 | import time 37 | import uuid 38 | from builtins import str as text 39 | from collections import OrderedDict, namedtuple 40 | from contextlib import closing 41 | 42 | import psycopg2 43 | import sqlparse 44 | import yaml 45 | from psycopg2.extensions import make_dsn, parse_dsn 46 | from psycopg2.extras import LoggingConnection 47 | from psycopg2.sql import SQL, Identifier 48 | 49 | LOG = logging.getLogger(__name__) 50 | 51 | 52 | class MigrateError(RuntimeError): 53 | """ 54 | Common migration error class 55 | """ 56 | 57 | 58 | class MalformedStatement(MigrateError): 59 | """ 60 | Incorrect statement exception 61 | """ 62 | 63 | 64 | class MalformedMigration(MigrateError): 65 | """ 66 | Incorrect migration exception 67 | """ 68 | 69 | 70 | class MalformedSchema(MigrateError): 71 | """ 72 | Incorrect schema exception 73 | """ 74 | 75 | 76 | class ConfigurationError(MigrateError): 77 | """ 78 | Incorrect config or cmd args exception 79 | """ 80 | 81 | 82 | class BaselineError(MigrateError): 83 | """ 84 | Baseline error class 85 | """ 86 | 87 | 88 | def get_conn_id(conn): 89 | """ 90 | Extract application_name from dsn 91 | """ 92 | parsed = parse_dsn(conn.dsn) 93 | return parsed['application_name'] 94 | 95 | 96 | class ConflictTerminator(threading.Thread): 97 | """ 98 | Kills conflicting pids (only on postgresql > 9.6) 99 | """ 100 | 101 | def __init__(self, conn_str, interval): 102 | threading.Thread.__init__(self, name='terminator') 103 | self.daemon = True 104 | self.log = logging.getLogger('terminator') 105 | self.conn_str = conn_str 106 | self.conns = set() 107 | self.interval = interval 108 | self.should_run = True 109 | self.conn = None 110 | 111 | def stop(self): 112 | """ 113 | Stop iterations and close connection 114 | """ 115 | self.should_run = False 116 | 117 | def add_conn(self, conn): 118 | """ 119 | Add conn pid to pgmirate pids list 120 | """ 121 | self.conns.add(get_conn_id(conn)) 122 | 123 | def remove_conn(self, conn): 124 | """ 125 | Remove conn from pgmigrate pids list 126 | """ 127 | self.conns.remove(get_conn_id(conn)) 128 | 129 | def run(self): 130 | """ 131 | Periodically terminate all backends blocking pgmigrate pids 132 | """ 133 | self.conn = _create_raw_connection(self.conn_str, self.log) 134 | self.conn.autocommit = True 135 | while self.should_run: 136 | with self.conn.cursor() as cursor: 137 | for conn_id in self.conns: 138 | cursor.execute( 139 | """ 140 | SELECT b.blocking_pid, 141 | pg_terminate_backend(b.blocking_pid) 142 | FROM (SELECT unnest(pg_blocking_pids(pid)) 143 | AS blocking_pid 144 | FROM pg_stat_activity 145 | WHERE application_name 146 | LIKE '%%' || %s || '%%') as b 147 | """, (conn_id, )) 148 | terminated = [x[0] for x in cursor.fetchall()] 149 | for i in terminated: 150 | self.log.info('Terminated conflicting pid: %s', i) 151 | time.sleep(self.interval) 152 | 153 | 154 | REF_COLUMNS = [ 155 | 'version', 156 | 'description', 157 | 'type', 158 | 'installed_by', 159 | 'installed_on', 160 | ] 161 | 162 | 163 | def _create_raw_connection(conn_string, logger=LOG): 164 | conn = psycopg2.connect(conn_string, connection_factory=LoggingConnection) 165 | conn.initialize(logger) 166 | 167 | return conn 168 | 169 | 170 | def _create_connection(config): 171 | conn_id = 'pgmigrate-{id}'.format(id=str(uuid.uuid4())) 172 | 
conn = _create_raw_connection( 173 | make_dsn(config.conn, application_name=conn_id)) 174 | if config.terminator_instance: 175 | config.terminator_instance.add_conn(conn) 176 | 177 | return conn 178 | 179 | 180 | def _init_cursor(conn, session): 181 | """ 182 | Get cursor initialized with session commands 183 | """ 184 | cursor = conn.cursor() 185 | for query in session: 186 | cursor.execute(query) 187 | LOG.info(cursor.statusmessage) 188 | 189 | return cursor 190 | 191 | 192 | def _is_initialized(schema, cursor): 193 | """ 194 | Check that database is initialized 195 | """ 196 | cursor.execute( 197 | 'SELECT EXISTS(SELECT 1 FROM ' 198 | 'information_schema.tables ' 199 | 'WHERE table_schema = %s ' 200 | 'AND table_name = %s)', (schema, 'schema_version')) 201 | table_exists = cursor.fetchone()[0] 202 | 203 | if not table_exists: 204 | return False 205 | 206 | cursor.execute( 207 | SQL('SELECT * from {schema}.schema_version limit 1').format( 208 | schema=Identifier(schema))) 209 | 210 | colnames = [desc[0] for desc in cursor.description] 211 | 212 | if colnames != REF_COLUMNS: 213 | raise MalformedSchema( 214 | ('Table {schema}.schema_version has unexpected ' 215 | 'structure: {struct}').format(schema=Identifier(schema), 216 | struct='|'.join(colnames))) 217 | 218 | return True 219 | 220 | 221 | MIGRATION_FILE_RE = re.compile(r'V(?P<version>\d+)__(?P<description>.+)\.sql$') 222 | 223 | MigrationInfo = namedtuple('MigrationInfo', ('meta', 'file_path')) 224 | 225 | Callbacks = namedtuple('Callbacks', 226 | ('beforeAll', 'beforeEach', 'afterEach', 'afterAll')) 227 | 228 | Config = namedtuple( 229 | 'Config', ('target', 'baseline', 'cursor', 'dryrun', 'callbacks', 'user', 230 | 'base_dir', 'conn', 'session', 'conn_instance', 231 | 'terminator_instance', 'termination_interval', 'schema', 232 | 'disable_schema_check', 'check_serial_versions', 233 | 'set_version_info_after_callbacks', 'show_only_unapplied')) 234 | 235 | CONFIG_IGNORE = ['cursor', 'conn_instance', 'terminator_instance'] 236 | 237 | 238 | def _get_files_from_dir(path): 239 | """ 240 | Get all files in all subdirs in path 241 | """ 242 | for root, _, files in os.walk(path): 243 | for fname in files: 244 | yield os.path.basename(fname), os.path.join(root, fname) 245 | 246 | 247 | def _get_migrations_info_from_dir(base_dir): 248 | """ 249 | Get all migrations from base dir 250 | """ 251 | path = os.path.join(base_dir, 'migrations') 252 | migrations = {} 253 | if not (os.path.exists(path) and os.path.isdir(path)): 254 | raise ConfigurationError( 255 | 'Migrations dir not found (expected to be {path})'.format( 256 | path=path)) 257 | for fname, file_path in _get_files_from_dir(path): 258 | match = MIGRATION_FILE_RE.match(fname) 259 | if match is None: 260 | LOG.warning('File %s does not match by pattern %s.
Skipping it.', 261 | file_path, MIGRATION_FILE_RE.pattern) 262 | continue 263 | version = int(match.group('version')) 264 | ret = { 265 | 'version': version, 266 | 'type': 'auto', 267 | 'installed_by': None, 268 | 'installed_on': None, 269 | 'description': match.group('description').replace('_', ' '), 270 | } 271 | ret['transactional'] = 'NONTRANSACTIONAL' not in ret['description'] 272 | migration = MigrationInfo( 273 | ret, 274 | file_path, 275 | ) 276 | if version in migrations: 277 | raise MalformedMigration( 278 | ('Found migrations with same version: {version} ' 279 | '\nfirst : {first_path}' 280 | '\nsecond: {second_path}').format( 281 | version=version, 282 | first_path=migration.file_path, 283 | second_path=migrations[version].file_path)) 284 | migrations[version] = migration 285 | 286 | return migrations 287 | 288 | 289 | def _get_migrations_info(base_dir, baseline_v, target_v): 290 | """ 291 | Get migrations from baseline to target from base dir 292 | """ 293 | migrations = {} 294 | target = target_v if target_v is not None else float('inf') 295 | 296 | for version, ret in _get_migrations_info_from_dir(base_dir).items(): 297 | if baseline_v < version <= target: 298 | migrations[version] = ret 299 | else: 300 | LOG.info( 301 | 'Ignore migration %r cause baseline: %r or target: %r', 302 | ret, 303 | baseline_v, 304 | target, 305 | ) 306 | return migrations 307 | 308 | 309 | def _get_info(base_dir, baseline_v, target_v, schema, cursor): 310 | """ 311 | Get migrations info from database and base dir 312 | """ 313 | ret = {} 314 | cursor.execute( 315 | SQL('SELECT {columns} FROM {schema}.schema_version').format( 316 | schema=Identifier(schema), 317 | columns=SQL(', ').join([Identifier(x) for x in REF_COLUMNS]))) 318 | for i in cursor.fetchall(): 319 | version = {} 320 | for j in enumerate(REF_COLUMNS): 321 | if j[1] == 'installed_on': 322 | version[j[1]] = i[j[0]].strftime('%F %H:%M:%S') 323 | else: 324 | version[j[1]] = i[j[0]] 325 | version['version'] = int(version['version']) 326 | transactional = 'NONTRANSACTIONAL' not in version['description'] 327 | version['transactional'] = transactional 328 | ret[version['version']] = MigrationInfo(meta=version, file_path='') 329 | 330 | baseline_v = max(baseline_v, sorted(ret.keys())[-1]) 331 | 332 | migrations_info = _get_migrations_info(base_dir, baseline_v, target_v) 333 | for version in migrations_info: 334 | num = migrations_info[version].meta['version'] 335 | if num not in ret: 336 | ret[num] = migrations_info[version] 337 | 338 | return ret 339 | 340 | 341 | def _get_database_user(cursor): 342 | cursor.execute('SELECT CURRENT_USER') 343 | return cursor.fetchone()[0] 344 | 345 | 346 | def _get_state(base_dir, baseline_v, target, schema, cursor): 347 | """ 348 | Get info wrapper (able to handle noninitialized database) 349 | """ 350 | if _is_initialized(schema, cursor): 351 | return _get_info(base_dir, baseline_v, target, schema, cursor) 352 | return _get_migrations_info(base_dir, baseline_v, target) 353 | 354 | 355 | def _set_baseline(baseline_v, user, schema, cursor): 356 | """ 357 | Cleanup schema_version and set baseline 358 | """ 359 | cursor.execute( 360 | SQL('SELECT EXISTS(SELECT 1 FROM {schema}' 361 | '.schema_version WHERE version >= %s::bigint)').format( 362 | schema=Identifier(schema)), (baseline_v, )) 363 | check_failed = cursor.fetchone()[0] 364 | 365 | if check_failed: 366 | raise BaselineError( 367 | 'Unable to baseline, version ' 368 | '{version} already applied'.format(version=text(baseline_v))) 369 | 370 | 
LOG.info('cleaning up table schema_version') 371 | cursor.execute( 372 | SQL('DELETE FROM {schema}.schema_version').format( 373 | schema=Identifier(schema))) 374 | LOG.info(cursor.statusmessage) 375 | 376 | LOG.info('setting baseline') 377 | cursor.execute( 378 | SQL('INSERT INTO {schema}.schema_version ' 379 | '(version, type, description, installed_by) ' 380 | 'VALUES (%s::bigint, %s, %s, %s)').format( 381 | schema=Identifier(schema)), 382 | (text(baseline_v), 'manual', 'Forced baseline', user)) 383 | LOG.info(cursor.statusmessage) 384 | 385 | 386 | def _init_schema(schema, cursor): 387 | """ 388 | Create schema_version table 389 | """ 390 | cursor.execute( 391 | 'SELECT EXISTS(SELECT 1 FROM ' 392 | 'information_schema.schemata ' 393 | 'WHERE schema_name = %s)', (schema, )) 394 | schema_exists = cursor.fetchone()[0] 395 | if not schema_exists: 396 | LOG.info('creating schema') 397 | cursor.execute( 398 | SQL('CREATE SCHEMA IF NOT EXISTS {schema}').format( 399 | schema=Identifier(schema))) 400 | LOG.info('creating type schema_version_type') 401 | cursor.execute( 402 | SQL('CREATE TYPE {schema}.schema_version_type ' 403 | 'AS ENUM (%s, %s)').format(schema=Identifier(schema)), 404 | ('auto', 'manual')) 405 | LOG.info(cursor.statusmessage) 406 | LOG.info('creating table schema_version') 407 | cursor.execute( 408 | SQL('CREATE TABLE {schema}.schema_version (' 409 | 'version BIGINT NOT NULL PRIMARY KEY, ' 410 | 'description TEXT NOT NULL, ' 411 | 'type {schema}.schema_version_type NOT NULL ' 412 | 'DEFAULT %s, ' 413 | 'installed_by TEXT NOT NULL, ' 414 | 'installed_on TIMESTAMP WITHOUT time ZONE ' 415 | 'DEFAULT now() NOT NULL)').format(schema=Identifier(schema)), 416 | ('auto', )) 417 | LOG.info(cursor.statusmessage) 418 | 419 | 420 | def _get_statements(path): 421 | """ 422 | Get statements from file 423 | """ 424 | with codecs.open(path, encoding='utf-8') as i: 425 | data = i.read() 426 | if u'/* pgmigrate-encoding: utf-8 */' not in data: 427 | try: 428 | data.encode('ascii') 429 | except UnicodeError as exc: 430 | raise MalformedStatement( 431 | 'Non ascii symbols in file: {0}, {1}'.format(path, text(exc))) 432 | data = sqlparse.format(data, strip_comments=True) 433 | for statement in sqlparse.parsestream(data, encoding='utf-8'): 434 | st_str = text(statement).strip().encode('utf-8') 435 | if st_str: 436 | yield st_str 437 | 438 | 439 | def _apply_statement(statement, file_path, cursor): 440 | """ 441 | Execute statement using cursor 442 | """ 443 | try: 444 | cursor.execute(statement) 445 | except psycopg2.Error as exc: 446 | LOG.error('Error executing statement from %s:', file_path) 447 | for line in statement.splitlines(): 448 | LOG.error(line) 449 | LOG.error(exc) 450 | raise MigrateError('Unable to apply statement') 451 | 452 | 453 | def _apply_file(file_path, cursor): 454 | """ 455 | Execute all statements in file 456 | """ 457 | try: 458 | for statement in _get_statements(file_path): 459 | _apply_statement(statement, file_path, cursor) 460 | except MalformedStatement as exc: 461 | LOG.error(exc) 462 | raise exc 463 | 464 | 465 | def _apply_version(version_info, cursor): 466 | """ 467 | Execute all statements in migration version 468 | """ 469 | LOG.info('Try apply version %r', version_info) 470 | 471 | _apply_file(version_info.file_path, cursor) 472 | 473 | 474 | def _set_schema_version(version, version_info, user, schema, cursor): 475 | cursor.execute( 476 | SQL('INSERT INTO {schema}.schema_version ' 477 | '(version, description, installed_by) ' 478 | 'VALUES (%s::bigint, %s, 
%s)').format(schema=Identifier(schema)), 479 | (text(version), version_info.meta['description'], user)) 480 | 481 | 482 | def _parse_str_callbacks(callbacks, ret, base_dir): 483 | if not callbacks: 484 | return ret 485 | callbacks = callbacks.split(',') 486 | for callback in callbacks: 487 | if not callback: 488 | continue 489 | tokens = callback.split(':') 490 | if tokens[0] not in ret._fields: 491 | raise ConfigurationError( 492 | 'Unexpected callback ' 493 | 'type: {type}'.format(type=text(tokens[0]))) 494 | path = os.path.join(base_dir, tokens[1]) 495 | if not os.path.exists(path): 496 | raise ConfigurationError( 497 | 'Path unavailable: {path}'.format(path=text(path))) 498 | if os.path.isdir(path): 499 | for fname in sorted(os.listdir(path)): 500 | getattr(ret, tokens[0]).append(os.path.join(path, fname)) 501 | else: 502 | getattr(ret, tokens[0]).append(path) 503 | 504 | return ret 505 | 506 | 507 | def _parse_dict_callbacks(callbacks, ret, base_dir): 508 | for i in callbacks: 509 | if i in ret._fields: 510 | for j in callbacks[i] or []: 511 | path = os.path.join(base_dir, j) 512 | if not os.path.exists(path): 513 | raise ConfigurationError( 514 | 'Path unavailable: {path}'.format(path=text(path))) 515 | if os.path.isdir(path): 516 | for fname in sorted(os.listdir(path)): 517 | getattr(ret, i).append(os.path.join(path, fname)) 518 | else: 519 | getattr(ret, i).append(path) 520 | else: 521 | raise ConfigurationError( 522 | 'Unexpected callback type: {type}'.format(type=text(i))) 523 | 524 | return ret 525 | 526 | 527 | def _get_callbacks(callbacks, base_dir=''): 528 | """ 529 | Parse cmdline/config callbacks 530 | """ 531 | ret = Callbacks(beforeAll=[], beforeEach=[], afterEach=[], afterAll=[]) 532 | if isinstance(callbacks, dict): 533 | return _parse_dict_callbacks(callbacks, ret, base_dir) 534 | return _parse_str_callbacks(callbacks, ret, base_dir) 535 | 536 | 537 | def _migrate_step(state, callbacks, user, schema, 538 | set_version_info_after_callbacks, cursor): 539 | """ 540 | Apply one version with callbacks 541 | """ 542 | before_all_executed = False 543 | should_migrate = False 544 | if not _is_initialized(schema, cursor): 545 | LOG.info('schema not initialized') 546 | _init_schema(schema, cursor) 547 | for version in sorted(state.keys()): 548 | LOG.debug('has version %r', version) 549 | if state[version].meta['installed_on'] is None: 550 | should_migrate = True 551 | if not before_all_executed and callbacks.beforeAll: 552 | LOG.info('Executing beforeAll callbacks:') 553 | for callback in callbacks.beforeAll: 554 | _apply_file(callback, cursor) 555 | LOG.info(callback) 556 | before_all_executed = True 557 | 558 | LOG.info('Migrating to version %d', version) 559 | if callbacks.beforeEach: 560 | LOG.info('Executing beforeEach callbacks:') 561 | for callback in callbacks.beforeEach: 562 | LOG.info(callback) 563 | _apply_file(callback, cursor) 564 | 565 | _apply_version(state[version], cursor) 566 | 567 | if not set_version_info_after_callbacks: 568 | _set_schema_version(version, state[version], user, schema, 569 | cursor) 570 | 571 | if callbacks.afterEach: 572 | LOG.info('Executing afterEach callbacks:') 573 | for callback in callbacks.afterEach: 574 | LOG.info(callback) 575 | _apply_file(callback, cursor) 576 | 577 | if set_version_info_after_callbacks: 578 | _set_schema_version(version, state[version], user, schema, 579 | cursor) 580 | 581 | if should_migrate and callbacks.afterAll: 582 | LOG.info('Executing afterAll callbacks:') 583 | for callback in callbacks.afterAll: 584 | 
LOG.info(callback) 585 | _apply_file(callback, cursor) 586 | 587 | 588 | def _finish(config): 589 | if config.dryrun: 590 | config.cursor.execute('rollback') 591 | else: 592 | config.cursor.execute('commit') 593 | if config.terminator_instance: 594 | config.terminator_instance.stop() 595 | config.conn_instance.close() 596 | 597 | 598 | def info(config, stdout=True): 599 | """ 600 | Info cmdline wrapper 601 | """ 602 | state = _get_state(config.base_dir, config.baseline, config.target, 603 | config.schema, config.cursor) 604 | if stdout: 605 | out_state = OrderedDict() 606 | for version in sorted(state, key=int): 607 | if config.show_only_unapplied and state[version].meta[ 608 | 'installed_on'] is not None: 609 | continue 610 | out_state[version] = state[version].meta 611 | sys.stdout.write( 612 | json.dumps(out_state, indent=4, separators=(',', ': ')) + '\n') 613 | 614 | _finish(config) 615 | 616 | return state 617 | 618 | 619 | def clean(config): 620 | """ 621 | Drop schema_version table 622 | """ 623 | if _is_initialized(config.schema, config.cursor): 624 | LOG.info('dropping schema_version') 625 | config.cursor.execute( 626 | SQL('DROP TABLE {schema}.schema_version').format( 627 | schema=Identifier(config.schema))) 628 | LOG.info(config.cursor.statusmessage) 629 | LOG.info('dropping schema_version_type') 630 | config.cursor.execute( 631 | SQL('DROP TYPE {schema}.schema_version_type').format( 632 | schema=Identifier(config.schema))) 633 | LOG.info(config.cursor.statusmessage) 634 | _finish(config) 635 | 636 | 637 | def baseline(config): 638 | """ 639 | Set baseline cmdline wrapper 640 | """ 641 | if not _is_initialized(config.schema, config.cursor): 642 | _init_schema(config.schema, config.cursor) 643 | _set_baseline(config.baseline, config.user, config.schema, config.cursor) 644 | 645 | _finish(config) 646 | 647 | 648 | def _prepare_nontransactional_steps(state, callbacks): 649 | steps = [] 650 | i = {'state': {}, 'cbs': _get_callbacks('')} 651 | for version in sorted(state): 652 | if not state[version].meta['transactional']: 653 | if i['state']: 654 | steps.append(i) 655 | i = {'state': {}, 'cbs': _get_callbacks('')} 656 | elif not steps: 657 | LOG.error('First migration MUST be transactional') 658 | raise MalformedMigration('First migration MUST ' 659 | 'be transactional') 660 | steps.append({ 661 | 'state': { 662 | version: state[version], 663 | }, 664 | 'cbs': _get_callbacks(''), 665 | }) 666 | else: 667 | i['state'][version] = state[version] 668 | i['cbs'] = callbacks 669 | 670 | if i['state']: 671 | steps.append(i) 672 | 673 | transactional = [] 674 | for (num, step) in enumerate(steps): 675 | if list(step['state'].values())[0].meta['transactional']: 676 | transactional.append(num) 677 | 678 | if len(transactional) > 1: 679 | for num in transactional[1:]: 680 | steps[num]['cbs'] = steps[num]['cbs']._replace(beforeAll=[]) 681 | for num in transactional[:-1]: 682 | steps[num]['cbs'] = steps[num]['cbs']._replace(afterAll=[]) 683 | 684 | LOG.info('Initialization plan result:\n %s', 685 | json.dumps(steps, indent=4, separators=(',', ': '))) 686 | 687 | return steps 688 | 689 | 690 | def _execute_mixed_steps(config, steps, nt_conn): 691 | commit_req = False 692 | for step in steps: 693 | if commit_req: 694 | config.cursor.execute('commit') 695 | commit_req = False 696 | if not list(step['state'].values())[0].meta['transactional']: 697 | cur = _init_cursor(nt_conn, config.session) 698 | else: 699 | cur = config.cursor 700 | commit_req = True 701 | _migrate_step(step['state'], 
step['cbs'], config.user, config.schema, 702 | config.set_version_info_after_callbacks, cur) 703 | 704 | 705 | def _schema_check(schema, cursor): 706 | """ 707 | Check that only one schema used in migrations 708 | """ 709 | cursor.execute( 710 | 'SELECT n.nspname, c.relname FROM pg_locks l JOIN pg_class c ON ' 711 | '(l.relation=c.oid) JOIN pg_namespace n ON ' 712 | '(c.relnamespace=n.oid) WHERE l.pid = pg_backend_pid() ' 713 | "AND n.nspname !~ '^pg_' AND n.nspname <> 'information_schema'") 714 | 715 | unexpected = set() 716 | for namespace, relation in cursor.fetchall(): 717 | if namespace != schema: 718 | unexpected.add('.'.join((namespace, relation))) 719 | 720 | if unexpected: 721 | raise MigrateError( 722 | 'Unexpected relations used in migrations: {used}'.format( 723 | used=(', '.join(sorted(unexpected))))) 724 | 725 | 726 | def _check_serial_versions(state, not_applied): 727 | """ 728 | Check that there are no gaps in migration versions 729 | """ 730 | applied = [x for x in state if state[x].meta['installed_on'] is not None] 731 | sorted_versions = sorted(not_applied) 732 | if applied: 733 | sorted_versions.insert(0, max(applied)) 734 | first = sorted_versions[0] 735 | last = sorted_versions[-1] 736 | if last - first + 1 != len(sorted_versions): 737 | versions = set(sorted_versions) 738 | missing = [str(x) for x in range(first, last) if x not in versions] 739 | raise MigrateError( 740 | 'Migration versions have gaps: missing versions {versions}'.format( 741 | versions=', '.join(missing))) 742 | 743 | 744 | def migrate(config): 745 | """ 746 | Migrate cmdline wrapper 747 | """ 748 | if config.target is None: 749 | LOG.error('Unknown target (you could use "latest" to ' 750 | 'use latest available version)') 751 | raise MigrateError('Unknown target') 752 | 753 | state = _get_state(config.base_dir, config.baseline, config.target, 754 | config.schema, config.cursor) 755 | not_applied = [x for x in state if state[x].meta['installed_on'] is None] 756 | non_trans = [x for x in not_applied if not state[x].meta['transactional']] 757 | 758 | if not_applied and config.check_serial_versions: 759 | _check_serial_versions(state, not_applied) 760 | 761 | if non_trans: 762 | if not config.disable_schema_check: 763 | raise MigrateError( 764 | 'Schema check is not available for nontransactional ' 765 | 'migrations') 766 | if config.dryrun: 767 | LOG.error('Dry run for nontransactional migrations is nonsense') 768 | raise MigrateError('Dry run for nontransactional migrations ' 769 | 'is nonsense') 770 | if len(state) != len(not_applied): 771 | if len(not_applied) != len(non_trans): 772 | LOG.error('Unable to mix transactional and ' 773 | 'nontransactional migrations') 774 | raise MigrateError('Unable to mix transactional and ' 775 | 'nontransactional migrations') 776 | config.cursor.execute('rollback') 777 | with closing(_create_connection(config)) as nt_conn: 778 | nt_conn.autocommit = True 779 | cursor = _init_cursor(nt_conn, config.session) 780 | _migrate_step(state, _get_callbacks(''), config.user, 781 | config.schema, 782 | config.set_version_info_after_callbacks, cursor) 783 | if config.terminator_instance: 784 | config.terminator_instance.remove_conn(nt_conn) 785 | else: 786 | steps = _prepare_nontransactional_steps(state, config.callbacks) 787 | 788 | with closing(_create_connection(config)) as nt_conn: 789 | nt_conn.autocommit = True 790 | 791 | _execute_mixed_steps(config, steps, nt_conn) 792 | 793 | if config.terminator_instance: 794 | config.terminator_instance.remove_conn(nt_conn) 795 | 
else: 796 | _migrate_step(state, config.callbacks, config.user, config.schema, 797 | config.set_version_info_after_callbacks, config.cursor) 798 | if not config.disable_schema_check: 799 | _schema_check(config.schema, config.cursor) 800 | 801 | _finish(config) 802 | 803 | 804 | COMMANDS = { 805 | 'info': info, 806 | 'clean': clean, 807 | 'baseline': baseline, 808 | 'migrate': migrate, 809 | } 810 | 811 | CONFIG_DEFAULTS = Config(target=None, 812 | baseline=0, 813 | cursor=None, 814 | dryrun=False, 815 | callbacks='', 816 | base_dir='', 817 | user=None, 818 | session=['SET lock_timeout = 0'], 819 | conn='dbname=postgres user=postgres ' 820 | 'connect_timeout=1', 821 | conn_instance=None, 822 | terminator_instance=None, 823 | termination_interval=None, 824 | schema=None, 825 | disable_schema_check=False, 826 | check_serial_versions=False, 827 | set_version_info_after_callbacks=False, 828 | show_only_unapplied=False) 829 | 830 | 831 | def get_config(base_dir, args=None): 832 | """ 833 | Load configuration from yml in base dir with respect of args 834 | """ 835 | path = os.path.join(base_dir, 'migrations.yml') 836 | try: 837 | with codecs.open(path, encoding='utf-8') as i: 838 | base = yaml.safe_load(i) or {} 839 | except IOError: 840 | LOG.info('Unable to load %s. Using defaults', path) 841 | base = {} 842 | 843 | conf = CONFIG_DEFAULTS 844 | for i in [j for j in CONFIG_DEFAULTS._fields if j not in CONFIG_IGNORE]: 845 | if i in base: 846 | conf = conf._replace(**{i: base[i]}) 847 | if args is not None: 848 | if i in args.__dict__ and args.__dict__[i] is not None: 849 | conf = conf._replace(**{i: args.__dict__[i]}) 850 | 851 | if conf.target is not None: 852 | if conf.target == 'latest': 853 | conf = conf._replace(target=float('inf')) 854 | else: 855 | conf = conf._replace(target=int(conf.target)) 856 | 857 | if conf.termination_interval and not conf.dryrun: 858 | conf = conf._replace(terminator_instance=ConflictTerminator( 859 | conf.conn, conf.termination_interval)) 860 | conf.terminator_instance.start() 861 | 862 | conf = conf._replace(conn_instance=_create_connection(conf)) 863 | conf = conf._replace(cursor=_init_cursor(conf.conn_instance, conf.session)) 864 | conf = conf._replace( 865 | callbacks=_get_callbacks(conf.callbacks, conf.base_dir)) 866 | 867 | if conf.user is None: 868 | conf = conf._replace(user=_get_database_user(conf.cursor)) 869 | elif not conf.user: 870 | raise ConfigurationError('Empty user name') 871 | if conf.schema is None: 872 | conf = conf._replace(schema='public') 873 | conf = conf._replace(disable_schema_check=True) 874 | 875 | return conf 876 | 877 | 878 | def _main(): 879 | """ 880 | Main function 881 | """ 882 | parser = argparse.ArgumentParser() 883 | 884 | parser.add_argument('cmd', 885 | choices=COMMANDS.keys(), 886 | type=str, 887 | help='Operation') 888 | parser.add_argument('-t', '--target', type=str, help='Target version') 889 | parser.add_argument('-c', 890 | '--conn', 891 | type=str, 892 | help='Postgresql connection string') 893 | parser.add_argument('-d', 894 | '--base_dir', 895 | type=str, 896 | default='', 897 | help='Migrations base dir') 898 | parser.add_argument('-u', 899 | '--user', 900 | type=str, 901 | help='Override database user in migration info') 902 | parser.add_argument('-b', '--baseline', type=int, help='Baseline version') 903 | parser.add_argument('-a', 904 | '--callbacks', 905 | type=str, 906 | help='Comma-separated list of callbacks ' 907 | '(type:dir/file)') 908 | parser.add_argument('-s', 909 | '--session', 910 | action='append', 
911 | help='Session setup (e.g. isolation level)') 912 | parser.add_argument('-n', 913 | '--dryrun', 914 | action='store_true', 915 | help='Say "rollback" in the end instead of "commit"') 916 | parser.add_argument('-l', 917 | '--termination_interval', 918 | type=float, 919 | help='Interval for terminating blocking pids') 920 | parser.add_argument('-m', '--schema', type=str, help='Operate on schema') 921 | parser.add_argument('--disable_schema_check', 922 | action='store_true', 923 | help='Do not check that all changes ' 924 | 'are in selected schema') 925 | parser.add_argument('--check_serial_versions', 926 | action='store_true', 927 | help='Check that there are no gaps ' 928 | 'in migration versions') 929 | parser.add_argument('-o', 930 | '--show_only_unapplied', 931 | action='store_true', 932 | help='Show only not applied migrations in info') 933 | parser.add_argument('-v', 934 | '--verbose', 935 | default=0, 936 | action='count', 937 | help='Be verbose') 938 | 939 | args = parser.parse_args() 940 | logging.basicConfig(level=(logging.ERROR - 10 * (min(3, args.verbose))), 941 | format='%(asctime)s %(levelname)-8s: %(message)s') 942 | 943 | config = get_config(args.base_dir, args) 944 | 945 | COMMANDS[args.cmd](config) 946 | 947 | 948 | if __name__ == '__main__': 949 | _main() 950 | -------------------------------------------------------------------------------- /run_test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -e 4 | 5 | chown -R postgres:postgres /dist 6 | mkdir -p /var/log/postgresql 7 | chown postgres:postgres /var/log/postgresql 8 | sudo -u postgres /usr/lib/postgresql/${PG_MAJOR}/bin/pg_ctl -D \ 9 | /etc/postgresql/${PG_MAJOR}/main -l \ 10 | /var/log/postgresql/postgresql-${PG_MAJOR}-main.log start 11 | cd /dist 12 | sudo -u postgres -i tox -c /dist/tox.ini 13 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | setup.py for pgmigrate 4 | """ 5 | # encoding: utf-8 6 | # 7 | # Copyright (c) 2016-2025 Yandex LLC 8 | # Copyright (c) 2016-2025 Other contributors as noted in the AUTHORS file. 9 | # 10 | # Permission to use, copy, modify, and distribute this software and its 11 | # documentation for any purpose, without fee, and without a written 12 | # agreement is hereby granted, provided that the above copyright notice 13 | # and this paragraph and the following two paragraphs appear in all copies. 14 | # 15 | # IN NO EVENT SHALL YANDEX LLC BE LIABLE TO ANY PARTY FOR DIRECT, 16 | # INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST 17 | # PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, 18 | # EVEN IF YANDEX LLC HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 19 | # 20 | # YANDEX SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A 22 | # PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS" 23 | # BASIS, AND YANDEX LLC HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE, 24 | # SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
25 | 26 | import sys 27 | 28 | try: 29 | from setuptools import setup 30 | except ImportError: 31 | from distutils.core import setup 32 | 33 | REQUIREMENTS = [ 34 | 'sqlparse >= 0.3.0', 35 | 'psycopg2 >= 2.8.2', 36 | 'PyYAML >= 5.2', 37 | ] 38 | 39 | if sys.version_info < (3, 0): 40 | REQUIREMENTS.append('future >= 0.17.1') 41 | 42 | with open('README.md', encoding='utf-8') as readme: 43 | long_description = readme.read() 44 | 45 | setup( 46 | name='yandex-pgmigrate', 47 | version='1.0.9', 48 | description='PostgreSQL migrations made easy', 49 | long_description=long_description, 50 | long_description_content_type='text/markdown', 51 | license='PostgreSQL License', 52 | url='https://github.com/yandex/pgmigrate/', 53 | author='Yandex LLC', 54 | author_email='opensource@yandex-team.ru', 55 | maintainer='Yandex LLC', 56 | maintainer_email='opensource@yandex-team.ru', 57 | zip_safe=False, 58 | platforms=['Linux', 'BSD', 'MacOS'], 59 | packages=['.'], 60 | entry_points={'console_scripts': [ 61 | 'pgmigrate = pgmigrate:_main', 62 | ]}, 63 | install_requires=REQUIREMENTS, 64 | ) 65 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | # Tox (http://tox.testrun.org/) is a tool for running tests 2 | # in multiple virtualenvs. This configuration file will run the 3 | # test suite on all supported python versions. To use it, "pip install tox" 4 | # and then run "tox" from this directory. 5 | 6 | [tox] 7 | envlist = py27, py311, flake8, pylint, yapf 8 | 9 | [testenv:py27] 10 | allowlist_externals = rm 11 | commands = rm -rf htmlcov 12 | coverage erase 13 | coverage run -p --include=pgmigrate.py {envbindir}/behave -q 14 | coverage combine 15 | coverage html pgmigrate.py 16 | coverage report --fail-under=100 pgmigrate.py 17 | deps = behave 18 | importlib 19 | coverage 20 | func_timeout 21 | future 22 | 23 | [testenv:py311] 24 | allowlist_externals = rm 25 | commands = rm -rf htmlcov 26 | coverage erase 27 | coverage run -p --include=pgmigrate.py {envbindir}/behave -q 28 | coverage combine 29 | coverage html pgmigrate.py 30 | coverage report --fail-under=100 pgmigrate.py 31 | deps = behave 32 | coverage 33 | func_timeout 34 | 35 | [testenv:{flake8,pylint,yapf}] 36 | envdir = {toxworkdir}/.lint_env 37 | commands = 38 | flake8: flake8 pgmigrate.py setup.py 39 | pylint: pylint pgmigrate.py 40 | yapf: yapf -pd pgmigrate.py 41 | deps = flake8==5.0.4 42 | flake8-string-format 43 | flake8-isort==5.0.0 44 | flake8-commas 45 | flake8-quotes 46 | flake8-copyright 47 | flake8-pep3101 48 | pylint 49 | yapf==0.40.2 50 | 51 | [flake8] 52 | copyright-check = True 53 | select = E,F,W,C 54 | copyright-regexp = Copyright\s+(\(C\)\s+)?(\d{4}-)?2016-2025\s+%(author)s 55 | copyright-author = Yandex LLC 56 | --------------------------------------------------------------------------------