├── src ├── __init__.py ├── unittest │ ├── __init__.py │ ├── utils │ │ ├── migrations │ │ │ ├── mysql │ │ │ │ ├── one │ │ │ │ │ ├── down.sql │ │ │ │ │ └── up.sql │ │ │ │ ├── three │ │ │ │ │ └── up.sql │ │ │ │ └── two │ │ │ │ │ └── up.sql │ │ │ └── postgresql │ │ │ │ ├── one │ │ │ │ ├── down.sql │ │ │ │ └── up.sql │ │ │ │ ├── three │ │ │ │ └── up.sql │ │ │ │ └── two │ │ │ │ └── up.sql │ │ ├── migrations_functional_test │ │ │ ├── mysql │ │ │ │ ├── 2018-12-15 │ │ │ │ │ └── up.sql │ │ │ │ ├── 2018-12-11 │ │ │ │ │ └── up.sql │ │ │ │ ├── 2018-12-14 │ │ │ │ │ └── up.sql │ │ │ │ ├── 2018-12-13 │ │ │ │ │ └── up.sql │ │ │ │ ├── 2018-12-12 │ │ │ │ │ └── up.sql │ │ │ │ └── 2018-12-10 │ │ │ │ │ └── up.sql │ │ │ └── postgresql │ │ │ │ ├── 2018-12-11 │ │ │ │ └── up.sql │ │ │ │ ├── 2018-12-13 │ │ │ │ └── up.sql │ │ │ │ ├── 2018-12-15 │ │ │ │ └── up.sql │ │ │ │ ├── 2018-12-12 │ │ │ │ └── up.sql │ │ │ │ ├── 2018-12-14 │ │ │ │ └── up.sql │ │ │ │ └── 2018-12-10 │ │ │ │ └── up.sql │ │ └── config │ │ │ ├── dbschema_empty_db.yml │ │ │ ├── dbschema_functional_test.yml │ │ │ └── dbschema.yml │ └── test_schema_change.py ├── __main__.py └── schema_change.py ├── MANIGEST.in ├── schema ├── postgresql.sql └── mysql.sql ├── .coveragerc ├── .gitignore ├── LICENSE ├── setup.py ├── dbschema_sample.yml ├── .github └── workflows │ └── ci.yml └── README.md /src/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/unittest/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /MANIGEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include README.md 3 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations/mysql/one/down.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE one; 2 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations/postgresql/one/down.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE one; 2 | -------------------------------------------------------------------------------- /src/__main__.py: -------------------------------------------------------------------------------- 1 | from . 
import schema_change 2 | 3 | if __name__ == '__main__': 4 | schema_change.main() 5 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations_functional_test/mysql/2018-12-15/up.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE t; 2 | 3 | DROP TABLE customer; 4 | -------------------------------------------------------------------------------- /schema/postgresql.sql: -------------------------------------------------------------------------------- 1 | 2 | CREATE TABLE migrations_applied ( 3 | id serial primary key, 4 | name text not null, 5 | date TIMESTAMP WITH TIME ZONE not null 6 | ); 7 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations_functional_test/mysql/2018-12-11/up.sql: -------------------------------------------------------------------------------- 1 | CREATE INDEX part_of_name ON customer (name(10)); 2 | 3 | CREATE INDEX street ON customer (street); 4 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations_functional_test/postgresql/2018-12-11/up.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE distributors RENAME COLUMN address TO city; 2 | 3 | ALTER TABLE distributors ALTER COLUMN street DROP NOT NULL; 4 | 5 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations_functional_test/mysql/2018-12-14/up.sql: -------------------------------------------------------------------------------- 1 | CREATE FUNCTION hello (s CHAR(20)) 2 | RETURNS CHAR(50) DETERMINISTIC 3 | RETURN CONCAT('Hello, ',s,'!'); 4 | 5 | SELECT hello('world'); 6 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source=dbschema 3 | 4 | [report] 5 | omit = 6 | setup.py 7 | src/__main__.py 8 | src/unittest/* 9 | venv/* 10 | exclude_lines = 11 | if __name__ == "__main__": 12 | main() 13 | -------------------------------------------------------------------------------- /schema/mysql.sql: -------------------------------------------------------------------------------- 1 | 2 | CREATE TABLE migrations_applied ( 3 | id int NOT NULL AUTO_INCREMENT, 4 | name varchar(256) not null, 5 | date datetime not null, 6 | PRIMARY KEY (id) 7 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8; 8 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations_functional_test/postgresql/2018-12-13/up.sql: -------------------------------------------------------------------------------- 1 | CREATE FUNCTION add(integer, integer) RETURNS integer 2 | AS 'select $1 + $2' 3 | LANGUAGE SQL 4 | IMMUTABLE 5 | RETURNS NULL ON NULL INPUT; 6 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations_functional_test/postgresql/2018-12-15/up.sql: -------------------------------------------------------------------------------- 1 | CREATE FUNCTION dup(in int, out f1 int, out f2 text) 2 | AS $$ SELECT $1, CAST($1 AS text) || ' is text' $$ 3 | LANGUAGE SQL; 4 | 5 | SELECT * FROM dup(42); 6 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations/mysql/one/up.sql: -------------------------------------------------------------------------------- 
1 | 2 | CREATE TABLE one ( 3 | id int NOT NULL AUTO_INCREMENT, 4 | name varchar(256) not null, 5 | date datetime not null, 6 | PRIMARY KEY (id) 7 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8; 8 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations/mysql/three/up.sql: -------------------------------------------------------------------------------- 1 | 2 | CREATE TABLE two ( 3 | id int NOT NULL AUTO_INCREMENT, 4 | name varchar(256) not null, 5 | date datetime not null, 6 | PRIMARY KEY (id) 7 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8; 8 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations/mysql/two/up.sql: -------------------------------------------------------------------------------- 1 | 2 | CREATE TABLE three ( 3 | id int NOT NULL AUTO_INCREMENT, 4 | name varchar(256) not null, 5 | date datetime not null, 6 | PRIMARY KEY (id) 7 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8; 8 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations/postgresql/one/up.sql: -------------------------------------------------------------------------------- 1 | 2 | CREATE TABLE one ( 3 | id serial primary key, 4 | name text not null, 5 | date TIMESTAMP WITH TIME ZONE not null 6 | ); 7 | 8 | CREATE UNIQUE INDEX ON migrations_applied (id); 9 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations/postgresql/three/up.sql: -------------------------------------------------------------------------------- 1 | 2 | CREATE TABLE two ( 3 | id serial primary key, 4 | name text not null, 5 | date TIMESTAMP WITH TIME ZONE not null 6 | ); 7 | 8 | CREATE UNIQUE INDEX ON migrations_applied (id); 9 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations/postgresql/two/up.sql: -------------------------------------------------------------------------------- 1 | 2 | CREATE TABLE three ( 3 | id serial primary key, 4 | name text not null, 5 | date TIMESTAMP WITH TIME ZONE not null 6 | ); 7 | 8 | CREATE UNIQUE INDEX ON migrations_applied (id); 9 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations_functional_test/mysql/2018-12-13/up.sql: -------------------------------------------------------------------------------- 1 | delimiter // 2 | 3 | CREATE PROCEDURE simpleproc (OUT param1 INT) 4 | BEGIN 5 | SELECT COUNT(*) INTO param1 FROM t; 6 | END// 7 | 8 | delimiter ; 9 | 10 | CALL simpleproc(@a); 11 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations_functional_test/postgresql/2018-12-12/up.sql: -------------------------------------------------------------------------------- 1 | -- Disabled, returns: 2 | -- psycopg2.errors.ActiveSqlTransaction: CREATE INDEX CONCURRENTLY cannot run inside a transaction block 3 | 4 | -- CREATE UNIQUE INDEX CONCURRENTLY dist_id_temp_idx ON distributors (dist_id); 5 | 6 | SELECT 1; -------------------------------------------------------------------------------- /src/unittest/utils/migrations_functional_test/mysql/2018-12-12/up.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE t1 ( 2 | col1 VARCHAR(10), 3 | col2 VARCHAR(20), 4 | col3 VARCHAR(20), 5 | INDEX (col1, col2(10)) 6 | ); 7 | 8 | CREATE INDEX idx1 ON t1 (col1); 9 | CREATE INDEX idx2 ON t1 (col2); 10 | ALTER 
TABLE t1 ADD INDEX (col3 DESC); 11 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations_functional_test/mysql/2018-12-10/up.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE t (c CHAR(20) CHARACTER SET utf8 COLLATE utf8_bin); 2 | 3 | CREATE TABLE test ( 4 | blob_col BLOB, 5 | INDEX(blob_col(10)) 6 | ); 7 | 8 | CREATE TABLE customer ( 9 | id int, 10 | name varchar(255), 11 | street varchar(255), 12 | primary key (id) 13 | ); -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Python 2 | __pycache__/ 3 | *.py[cod] 4 | .Python 5 | *.egg-info 6 | build/ 7 | dist/ 8 | venv/ 9 | .pytest_cache 10 | 11 | # Codecov 12 | .coverage 13 | coverage.xml 14 | 15 | # Mac 16 | .DS_Store 17 | 18 | # tests 19 | src/unittest/utils/config/dbschema2.yml 20 | src/unittest/utils/config/dbschema_empty_db2.yml 21 | 22 | # Misc 23 | tests/ 24 | local/ 25 | -------------------------------------------------------------------------------- /src/unittest/utils/migrations_functional_test/postgresql/2018-12-14/up.sql: -------------------------------------------------------------------------------- 1 | CREATE OR REPLACE FUNCTION increment(i integer) RETURNS integer AS $$ 2 | BEGIN 3 | RETURN i + 1; 4 | END; 5 | $$ LANGUAGE plpgsql; 6 | 7 | CREATE OR REPLACE FUNCTION increment_2(i integer) RETURNS integer 8 | AS $$ 9 | BEGIN 10 | RETURN i + 2; 11 | END; 12 | $$ LANGUAGE plpgsql; 13 | -------------------------------------------------------------------------------- /src/unittest/utils/config/dbschema_empty_db.yml: -------------------------------------------------------------------------------- 1 | databases: 2 | tag_postgresql_empty: 3 | engine: postgresql 4 | host: localhost 5 | port: 5432 6 | user: db_user 7 | password: db_password 8 | db: my_empty_db 9 | path: src/unittest/utils/migrations/postgresql/ 10 | tag_mysql_empty: 11 | engine: mysql 12 | host: localhost 13 | port: 3306 14 | user: root 15 | password: root 16 | db: my_empty_db 17 | path: src/unittest/utils/migrations/mysql/ -------------------------------------------------------------------------------- /src/unittest/utils/migrations_functional_test/postgresql/2018-12-10/up.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE films ( 2 | code char(5) CONSTRAINT firstkey PRIMARY KEY, 3 | title varchar(40) NOT NULL, 4 | did integer NOT NULL, 5 | date_prod date, 6 | kind varchar(10), 7 | len interval hour to minute 8 | ); 9 | 10 | CREATE TABLE distributors ( 11 | did serial PRIMARY key, 12 | name varchar(40) NOT NULL CHECK (name <> ''), 13 | address text, 14 | street text not null, 15 | dist_id int 16 | ); 17 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Gabriel Bordeaux 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following 
conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | import pypandoc 4 | 5 | setup( 6 | name='dbschema', 7 | version='1.4.3', 8 | description='Schema migration made easy', 9 | long_description=pypandoc.convert_file('README.md', 'rst'), 10 | author='Gabriel Bordeaux', 11 | author_email='pypi@gab.lc', 12 | url='https://github.com/gabfl/dbschema', 13 | license='MIT', 14 | packages=['dbschema'], 15 | package_dir={'dbschema': 'src'}, 16 | install_requires=['argparse', 'PyYAML', 'pymysql', 17 | 'psycopg2-binary'], # external dependencies 18 | entry_points={ 19 | 'console_scripts': [ 20 | 'dbschema = dbschema.schema_change:main', 21 | ], 22 | }, 23 | classifiers=[ # see https://pypi.python.org/pypi?%3Aaction=list_classifiers 24 | 'Topic :: Database', 25 | 'Topic :: Database :: Database Engines/Servers', 26 | 'License :: OSI Approved :: MIT License', 27 | 'Operating System :: MacOS', 28 | 'Operating System :: POSIX :: Linux', 29 | 'Natural Language :: English', 30 | # 'Programming Language :: Python :: 2.7', 31 | 'Programming Language :: Python :: 3', 32 | 'Programming Language :: Python', 33 | 'Development Status :: 4 - Beta', 34 | # 'Development Status :: 5 - Production/Stable', 35 | ], 36 | ) 37 | -------------------------------------------------------------------------------- /dbschema_sample.yml: -------------------------------------------------------------------------------- 1 | databases: 2 | db1: # Unique tag 3 | engine: postgresql # Engine name (`postgresql` or `mysql`) 4 | host: 127.0.0.1 # Database host 5 | port: 5432 # Database port 6 | user: gab # Username 7 | password: azerty # Optional password 8 | db: my_db # Database name 9 | # sslmode: require 10 | # sslcert: /path/to/client-cert.pem 11 | # sslkey: /path/to/client-key.pem 12 | # sslrootcert: /etc/ssl/certs/your-ca-cert.pem 13 | # sslcrl: /etc/ssl/certs/crl.pem # Optional SSL certificate revocation list 14 | # sslcompression: 1 15 | path: /path/to/migrations/ # Path to the migration folder 16 | pre_migration: '' # Optional queries run before migrating 17 | post_migration: 'GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO gab; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO gab' # Optional queries run after migrating 18 | db2: 19 | engine: mysql 20 | host: 127.0.0.1 21 | port: 3306 22 | user: gab 23 | password: azerty 24 | db: my_db 25 | # ssl_check_hostname: true # set to false to disable hostname checking of the server cert 26 | # ssl_cert: /path/to/client-cert.pem 27 | # ssl_key: /path/to/client-key.pem 28 | # ssl_ca: /etc/ssl/certs/your-ca-cert.pem 29 | # ssl_cipher: 'cipher-name' 30 | # ssl_capath: /path/to/ca-certs/ 31 | path: /path/to/migrations/ 32 | pre_migration: '' 33 | post_migration: '' 34 |
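# Note: most keys above are optional. In src/schema_change.py, `engine` defaults to `mysql`,
# `host` to `localhost` and `port` to 3306 when omitted, so a minimal entry only needs the
# credentials, the database name and the migrations path. Illustrative sketch (the `db3` tag
# and its values are placeholders, not part of the sample above):
#
# databases:
#   db3:
#     user: gab
#     password: azerty
#     db: my_db
#     path: /path/to/migrations/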
-------------------------------------------------------------------------------- /src/unittest/utils/config/dbschema_functional_test.yml: -------------------------------------------------------------------------------- 1 | databases: 2 | tag_postgresql: # Unique tag 3 | engine: postgresql # Engine name (`postgresql` pr `mysql`) 4 | host: localhost # Database host 5 | port: 5432 # Database port 6 | user: db_user # Username 7 | password: db_password # Optional password 8 | db: my_db_functional_test # Database name 9 | # sslmode: require 10 | # sslcert: /path/to/client-cert.pem 11 | # sslkey: /path/to/client-key.pem 12 | # sslrootcert: /etc/ssl/certs/your-ca-cert.pem 13 | # sslcrl: /etc/ssl/certs/crl.pem # Optional SSL certificate revocation list 14 | sslcompression: 1 15 | path: src/unittest/utils/migrations_functional_test/postgresql/ # Path to the migration folder 16 | pre_migration: 'SELECT 1;' # Optional queries ran before migrating 17 | post_migration: 'GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO postgres; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO postgres' # Optional queries ran after migrating 18 | tag_mysql: 19 | engine: mysql 20 | host: localhost 21 | port: 3306 22 | user: root 23 | password: root 24 | db: my_db_functional_test 25 | # ssl_check_hostname: true # set to false to disable hostname checking of the server cert 26 | # ssl_cert: /path/to/client-cert.pem 27 | # ssl_key: /path/to/client-key.pem 28 | # ssl_ca: /etc/ssl/certs/your-ca-cert.pem 29 | # ssl_cipher: 'cipher-name' 30 | # ssl_capath: /path/to/ca-certs/ 31 | path: src/unittest/utils/migrations_functional_test/mysql/ 32 | 33 | -------------------------------------------------------------------------------- /src/unittest/utils/config/dbschema.yml: -------------------------------------------------------------------------------- 1 | databases: 2 | tag_postgresql: # Unique tag 3 | engine: postgresql # Engine name (`postgresql` pr `mysql`) 4 | host: localhost # Database host 5 | port: 5432 # Database port 6 | user: db_user # Username 7 | password: db_password # Optional password 8 | db: my_db # Database name 9 | # sslmode: require 10 | # sslcert: /path/to/client-cert.pem 11 | # sslkey: /path/to/client-key.pem 12 | # sslrootcert: /etc/ssl/certs/your-ca-cert.pem 13 | # sslcrl: /etc/ssl/certs/crl.pem # Optional SSL certificate revocation list 14 | sslcompression: 1 15 | path: src/unittest/utils/migrations/postgresql/ # Path to the migration folder 16 | pre_migration: 'SELECT 1;' # Optional queries ran before migrating 17 | post_migration: 'GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO postgres; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO postgres' # Optional queries ran after migrating 18 | tag_mysql: 19 | engine: mysql 20 | host: localhost 21 | port: 3306 22 | user: root 23 | password: root 24 | db: my_db 25 | # ssl_check_hostname: true # set to false to disable hostname checking of the server cert 26 | # ssl_cert: /path/to/client-cert.pem 27 | # ssl_key: /path/to/client-key.pem 28 | # ssl_ca: /etc/ssl/certs/your-ca-cert.pem 29 | # ssl_cipher: 'cipher-name' 30 | # ssl_capath: /path/to/ca-certs/ 31 | path: src/unittest/utils/migrations/mysql/ 32 | tag_mysql_wrong_path: 33 | engine: mysql 34 | host: localhost 35 | port: 3306 36 | user: root 37 | password: root 38 | db: my_db 39 | path: src/unittest/utils/migrations/non_existent/ 40 | 41 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: 
-------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | matrix: 10 | python-version: ['3.7', '3.8', '3.9'] 11 | 12 | steps: 13 | - uses: actions/checkout@v2 14 | - name: Set up Python 15 | uses: actions/setup-python@v2 16 | with: 17 | python-version: ${{ matrix.python-version }} 18 | architecture: x64 19 | - name: Display Python version 20 | run: python -c "import sys; print(sys.version)" 21 | - name: Set up MySQL 22 | run: | 23 | sudo systemctl start mysql.service 24 | mysql -e 'CREATE DATABASE my_db;' -uroot -proot 25 | mysql -e 'CREATE DATABASE my_db_functional_test;' -uroot -proot 26 | mysql -e 'CREATE DATABASE my_empty_db;' -uroot -proot 27 | - name: Import MySQL schemas 28 | run: | 29 | mysql my_db -uroot -proot < schema/mysql.sql 30 | mysql my_db_functional_test -uroot -proot < schema/mysql.sql 31 | - name: Start PostgreSQL 32 | run: | 33 | sudo systemctl start postgresql.service 34 | pg_isready 35 | - name: Create PostgreSQL user 36 | run: | 37 | sudo -u postgres psql --command="CREATE USER db_user PASSWORD 'db_password'" --command="\du" 38 | - name: Create PostgreSQL databases 39 | run: | 40 | sudo -u postgres createdb --owner=db_user my_db 41 | sudo -u postgres createdb --owner=db_user my_db_functional_test 42 | sudo -u postgres createdb --owner=db_user my_empty_db 43 | - name: Import PostgreSQL schemas 44 | run: | 45 | psql -h localhost -f schema/postgresql.sql -d my_db -U db_user 46 | psql -h localhost -f schema/postgresql.sql -d my_db_functional_test -U db_user 47 | env: 48 | PGPASSWORD: db_password 49 | - name: Install deb dependencies 50 | run: | 51 | sudo apt update 52 | sudo apt install pandoc 53 | - name: Install dependencies 54 | run: | 55 | pip install -U pip 56 | pip install pycodestyle coverage pytest pypandoc 57 | python setup.py install 58 | - name: Copy config file 59 | run: | 60 | cp src/unittest/utils/config/dbschema.yml ~/.dbschema.yml 61 | - name: Run dbschema 62 | run: | 63 | dbschema --config src/unittest/utils/config/dbschema_functional_test.yml 64 | - name: Run pycodestyle 65 | run: | 66 | pycodestyle --exclude=venv --ignore=E501 . 67 | - name: Run pytest 68 | run: | 69 | coverage run --source=. -m pytest 70 | - name: Generate coverage report 71 | run: | 72 | coverage report -m 73 | - name: Upload coverage reports to Codecov 74 | run: | 75 | curl -Os https://uploader.codecov.io/latest/linux/codecov 76 | chmod +x codecov 77 | ./codecov -t ${CODECOV_TOKEN} 78 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # dbschema 2 | 3 | [![Pypi](https://img.shields.io/pypi/v/dbschema.svg)](https://pypi.org/project/dbschema) 4 | [![Build Status](https://github.com/gabfl/dbschema/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/gabfl/dbschema/actions) 5 | [![codecov](https://codecov.io/gh/gabfl/dbschema/branch/main/graph/badge.svg)](https://codecov.io/gh/gabfl/dbschema) 6 | [![MIT licensed](https://img.shields.io/badge/license-MIT-green.svg)](https://raw.githubusercontent.com/gabfl/dbschema/main/LICENSE) 7 | 8 | `dbschema` is a tool to run MySQL or PostgreSQL migrations automatically. Using a table, it keeps a state of previous migrations to avoid duplicates. 
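For example, you can query the state table at any time to see what has already been applied. A minimal sketch, using the `migrations_applied` columns defined in [schema/mysql.sql](schema/mysql.sql) and [schema/postgresql.sql](schema/postgresql.sql):

```sql
-- dbschema records every applied migration with the equivalent of:
--   INSERT INTO migrations_applied (name, date) VALUES ('<migration folder name>', NOW());
-- To review what has been applied so far:
SELECT id, name, date FROM migrations_applied ORDER BY date;
```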
9 | 10 | Features: 11 | 12 | - Support for MySQL and PostgreSQL 13 | - Optional pre- and post-migration queries (for example, to update privileges) 14 | - Multiple migrations across multiple databases can be processed in a single run 15 | 16 | ## Installation 17 | 18 | ### Install `dbschema` 19 | 20 | ```bash 21 | # Install required packages 22 | apt-get update 23 | apt-get install --yes libpq-dev gcc python3-dev 24 | 25 | pip3 install dbschema 26 | ``` 27 | 28 | ### Create a config file 29 | 30 | Create the file `~/.dbschema.yml` and add the configuration for your databases. [See example](dbschema_sample.yml) 31 | 32 | ### Create migrations table 33 | 34 | `dbschema` uses a table called `migrations_applied` to keep track of the migrations that have already been applied, so they are not run twice. 35 | See the schema for [MySQL](schema/mysql.sql) or [PostgreSQL](schema/postgresql.sql). 36 | 37 | ## Migrations folder structure 38 | 39 | For each database, you need to define a migrations path (the `path` setting in the config file). 40 | 41 | Within that path, create one folder per migration. Each folder must contain a file called `up.sql` with the SQL queries and, optionally, a file called `down.sql` for rollbacks. 42 | 43 | ``` 44 | /path/to/migrations/db1/ 45 | |-- migration1/ 46 | | |-- up.sql 47 | | |-- down.sql 48 | |-- migration2/ 49 | | |-- up.sql 50 | |... 51 | /path/to/migrations/db2/ 52 | |-- migration1/ 53 | | |-- up.sql 54 | |-- migration2/ 55 | | |-- up.sql 56 | | |-- down.sql 57 | |... 58 | ``` 59 | 60 | ## Usage 61 | 62 | ### Apply pending migrations 63 | 64 | ```bash 65 | dbschema 66 | 67 | # or to specify a config file path 68 | dbschema --config /path/to/config.yml 69 | 70 | # or to migrate only a specific database 71 | dbschema --tag db1 72 | ``` 73 | 74 | ### Rollback 75 | 76 | ```bash 77 | dbschema --tag db1 --rollback migration1 78 | ``` 79 | 80 | ## Example 81 | 82 | ```bash 83 | $ dbschema 84 | * Applying migrations for db1 (`test` on postgresql) 85 | -> Migration `migration1` applied 86 | -> Migration `migration2` applied 87 | -> Migration `migration3` applied 88 | * Migrations applied 89 | * Applying migrations for db2 (`test` on mysql) 90 | -> Migration `migration1` applied 91 | -> Migration `migration2` applied 92 | -> Migration `migration3` applied 93 | * Migrations applied 94 | $ 95 | $ dbschema --tag db2 --rollback migration1 96 | * Rolling back mysql -> `migration1` 97 | -> Migration `migration1` has been rolled back 98 | $ 99 | ``` 100 | -------------------------------------------------------------------------------- /src/schema_change.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os 4 | import codecs 5 | from glob import glob 6 | 7 | import yaml 8 | import argparse 9 | import pymysql.cursors 10 | import pymysql.constants.CLIENT 11 | import psycopg2.extras 12 | import psycopg2 13 | 14 | 15 | def get_config(override=None): 16 | """ Get config file """ 17 | 18 | # Set location 19 | config_path = os.path.expanduser('~') + '/.dbschema.yml' 20 | if override: 21 | config_path = override 22 | 23 | # Check if the config file exists 24 | check_exists(config_path) 25 | 26 | # Load config 27 | with open(config_path) as f: 28 | # use safe_load instead of load 29 | config = yaml.safe_load(f) 30 | 31 | return config 32 | 33 | 34 | def check_exists(path, type='file'): 35 | """ Check if a file or a folder exists """ 36 | 37 | if type == 'file': 38 | if not os.path.isfile(path): 39 | raise RuntimeError('The file `%s` does not exist.'
% path) 40 | else: 41 | if not os.path.isdir(path): 42 | raise RuntimeError('The folder `%s` does not exist.' % path) 43 | 44 | return True 45 | 46 | 47 | def get_migrations_files(path): 48 | """ List migrations folders """ 49 | 50 | migrations = glob(path + '*/up.sql') 51 | migrations.sort() 52 | 53 | return migrations 54 | 55 | 56 | def add_slash(path): 57 | """ Ensure that the path ends with a slash """ 58 | 59 | if not path.endswith('/'): 60 | return path + '/' 61 | 62 | return path 63 | 64 | 65 | def get_migration_name(file): 66 | """ 67 | Returns the migration name, for example: 68 | `/path/to/migrations/migration1/up.sql` -> `migration1` 69 | """ 70 | 71 | return os.path.basename(os.path.dirname(file)) 72 | 73 | 74 | def get_migration_source(file): 75 | """ Returns migration source code """ 76 | 77 | with open(file, "r") as f: 78 | return f.read() 79 | 80 | 81 | def get_connection(engine, host, user, port, password, database, ssl={}): 82 | """ Returns a PostgreSQL or MySQL connection """ 83 | 84 | if engine == 'mysql': 85 | # Connection 86 | return get_mysql_connection(host, user, port, password, database, ssl) 87 | elif engine == 'postgresql': 88 | # Connection 89 | return get_pg_connection(host, user, port, password, database, ssl) 90 | else: 91 | raise RuntimeError('`%s` is not a valid engine.' % engine) 92 | 93 | 94 | def get_mysql_connection(host, user, port, password, database, ssl={}): 95 | """ MySQL connection """ 96 | 97 | return pymysql.connect(host=host, 98 | user=user, 99 | port=port, 100 | password=password, 101 | db=database, 102 | charset='utf8mb4', 103 | cursorclass=pymysql.cursors.DictCursor, 104 | client_flag=pymysql.constants.CLIENT.MULTI_STATEMENTS, 105 | ssl=ssl 106 | ) 107 | 108 | 109 | def get_pg_connection(host, user, port, password, database, ssl={}): 110 | """ PostgreSQL connection """ 111 | 112 | return psycopg2.connect(host=host, 113 | user=user, 114 | port=port, 115 | password=password, 116 | dbname=database, 117 | sslmode=ssl.get('sslmode'), 118 | sslcert=ssl.get('sslcert'), 119 | sslkey=ssl.get('sslkey'), 120 | sslrootcert=ssl.get('sslrootcert'), 121 | ) 122 | 123 | 124 | def parse_statements(queries_input, engine): 125 | """ Parse input and return a list of SQL statements """ 126 | 127 | queries = [] 128 | query = '' 129 | sql_delimiter = ';' 130 | 131 | # Possible delimiters used in PostgreSQL functions 132 | postgres_func_delimiters = ['$$', '##'] 133 | 134 | # Split input by lines 135 | lines = queries_input.strip().split('\n') 136 | 137 | for k, line in enumerate(lines): 138 | # Strip line 139 | line = line.strip() 140 | 141 | # Skip empty lines and comments 142 | if not line or line.startswith('--'): 143 | continue 144 | 145 | # Detect new SQL delimiter 146 | if engine == 'mysql' and line.upper().startswith('DELIMITER '): 147 | sql_delimiter = line.split()[1] 148 | continue 149 | elif engine == 'postgresql' and [delimiter for delimiter in postgres_func_delimiters if 'AS ' + delimiter in line.upper()]: 150 | sql_delimiter = line.split()[-1] 151 | 152 | # Ensure that we leave 'AS [DELIMITER]' 153 | query += line + '\n' 154 | 155 | continue 156 | 157 | # Statement is not finished 158 | if sql_delimiter not in line and k != len(lines) - 1: 159 | # Append line 160 | query += line + '\n' 161 | else: # Statement is finished 162 | # Replace non default delimiter 163 | if sql_delimiter != ';' and engine == 'mysql' and line.endswith(sql_delimiter): 164 | line = line.replace(sql_delimiter, ';') 165 | 166 | queries.append(query + line) 167 | query = '' 168 | 
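    # At this point each element of `queries` is one complete SQL statement;
    # any custom MySQL DELIMITER has already been normalized back to ';'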
169 | return queries 170 | 171 | 172 | def run_migration(connection, queries, engine): 173 | """ Apply a migration to the SQL server """ 174 | 175 | # Execute query 176 | with connection.cursor() as cursorMig: 177 | # Parse statements 178 | queries = parse_statements(queries, engine) 179 | 180 | for query in queries: 181 | cursorMig.execute(query) 182 | connection.commit() 183 | 184 | return True 185 | 186 | 187 | def save_migration(connection, basename): 188 | """ Save a migration in `migrations_applied` table """ 189 | 190 | # Prepare query 191 | sql = "INSERT INTO migrations_applied (name, date) VALUES (%s, NOW())" 192 | 193 | # Run 194 | with connection.cursor() as cursor: 195 | cursor.execute(sql, (basename,)) 196 | connection.commit() 197 | 198 | return True 199 | 200 | 201 | def delete_migration(connection, basename): 202 | """ Delete a migration in `migrations_applied` table """ 203 | 204 | # Prepare query 205 | sql = "DELETE FROM migrations_applied WHERE name = %s" 206 | 207 | # Run 208 | with connection.cursor() as cursor: 209 | cursor.execute(sql, (basename,)) 210 | connection.commit() 211 | 212 | return True 213 | 214 | 215 | def is_applied(migrations_applied, migration_name): 216 | """ Check if a migration we want to run is already in the list of applied migrations """ 217 | 218 | return [True for migration in migrations_applied if migration['name'] == migration_name] 219 | 220 | 221 | def get_migrations_applied(engine, connection): 222 | """ Get list of migrations already applied """ 223 | 224 | try: 225 | # Get cursor based on engine 226 | if engine == 'postgresql': 227 | cursor = connection.cursor( 228 | cursor_factory=psycopg2.extras.RealDictCursor) 229 | else: 230 | cursor = connection.cursor() 231 | 232 | sql = "SELECT id, name, date FROM migrations_applied" 233 | cursor.execute(sql) 234 | rows = cursor.fetchall() 235 | # print (rows); 236 | return rows 237 | except psycopg2.ProgrammingError: 238 | raise RuntimeError( 239 | 'The table `migrations_applied` is missing. Please refer to the project documentation at https://github.com/gabfl/dbschema.') 240 | except pymysql.err.ProgrammingError: 241 | raise RuntimeError( 242 | 'The table `migrations_applied` is missing. 
Please refer to the project documentation at https://github.com/gabfl/dbschema.') 243 | 244 | 245 | def apply_migrations(engine, connection, path): 246 | """ Apply all migrations in a chronological order """ 247 | 248 | # Get migrations applied 249 | migrations_applied = get_migrations_applied(engine, connection) 250 | # print(migrationsApplied) 251 | 252 | # Get migrations folder 253 | for file in get_migrations_files(path): 254 | # Set vars 255 | basename = os.path.basename(os.path.dirname(file)) 256 | 257 | # Skip migrations if they are already applied 258 | if is_applied(migrations_applied, basename): 259 | continue 260 | 261 | # Get migration source 262 | source = get_migration_source(file) 263 | # print (source); 264 | 265 | # Run migration 266 | run_migration(connection, source, engine) 267 | 268 | # Save migration 269 | save_migration(connection, basename) 270 | 271 | # Log 272 | print(' -> Migration `%s` applied' % (basename)) 273 | 274 | # Log 275 | print(' * Migrations applied') 276 | 277 | return True 278 | 279 | 280 | def rollback_migration(engine, connection, path, migration_to_rollback): 281 | """ Rollback a migration """ 282 | 283 | # Get migrations applied 284 | migrations_applied = get_migrations_applied(engine, connection) 285 | 286 | # Ensure that the migration was previously applied 287 | if not is_applied(migrations_applied, migration_to_rollback): 288 | raise RuntimeError( 289 | '`%s` is not in the list of previously applied migrations.' % (migration_to_rollback)) 290 | 291 | # Rollback file 292 | file = path + migration_to_rollback + '/down.sql' 293 | 294 | # Ensure that the file exists 295 | check_exists(file) 296 | 297 | # Set vars 298 | basename = os.path.basename(os.path.dirname(file)) 299 | 300 | # Get migration source 301 | source = get_migration_source(file) 302 | # print (source); 303 | 304 | # Run migration rollback 305 | run_migration(connection, source, engine) 306 | 307 | # Delete migration 308 | delete_migration(connection, basename) 309 | 310 | # Log 311 | print(' -> Migration `%s` has been rolled back' % (basename)) 312 | 313 | return True 314 | 315 | 316 | def get_ssl(database): 317 | """ Returns SSL options for the selected engine """ 318 | 319 | # Set available keys per engine 320 | if database['engine'] == 'postgresql': 321 | keys = ['sslmode', 'sslcert', 'sslkey', 322 | 'sslrootcert', 'sslcrl', 'sslcompression'] 323 | else: 324 | keys = ['ssl_ca', 'ssl_capath', 'ssl_cert', 'ssl_key', 325 | 'ssl_cipher', 'ssl_check_hostname'] 326 | 327 | # Loop thru keys 328 | ssl = {} 329 | for key in keys: 330 | value = database.get(key) 331 | if value is not None: 332 | ssl[key] = value 333 | 334 | return ssl 335 | 336 | 337 | def apply(config_override=None, tag_override=None, rollback=None, skip_missing=None): 338 | """ Look thru migrations and apply them """ 339 | 340 | # Load config 341 | config = get_config(config_override) 342 | databases = config['databases'] 343 | 344 | # If we are rolling back, ensure that we have a database tag 345 | if rollback and not tag_override: 346 | raise RuntimeError( 347 | 'To rollback a migration you need to specify the database tag with `--tag`') 348 | 349 | for tag in sorted(databases): 350 | # If a tag is specified, skip other tags 351 | if tag_override and tag_override != tag: 352 | continue 353 | 354 | # Set vars 355 | engine = databases[tag].get('engine', 'mysql') 356 | host = databases[tag].get('host', 'localhost') 357 | port = databases[tag].get('port', 3306) 358 | user = databases[tag]['user'] 359 | password = 
databases[tag].get('password') 360 | db = databases[tag]['db'] 361 | path = add_slash(databases[tag]['path']) 362 | pre_migration = databases[tag].get('pre_migration') 363 | post_migration = databases[tag].get('post_migration') 364 | 365 | # Check if the migration path exists 366 | if skip_missing: 367 | try: 368 | check_exists(path, 'dir') 369 | except RuntimeError: 370 | continue 371 | else: 372 | check_exists(path, 'dir') 373 | 374 | # Get database connection 375 | connection = get_connection( 376 | engine, host, user, port, password, db, get_ssl(databases[tag])) 377 | 378 | # Run pre migration queries 379 | if pre_migration: 380 | run_migration(connection, pre_migration, engine) 381 | 382 | if rollback: 383 | print(' * Rolling back %s (`%s` on %s)' % (tag, db, engine)) 384 | 385 | rollback_migration(engine, connection, path, rollback) 386 | else: 387 | print(' * Applying migrations for %s (`%s` on %s)' % 388 | (tag, db, engine)) 389 | 390 | apply_migrations(engine, connection, path) 391 | 392 | # Run post migration queries 393 | if post_migration: 394 | run_migration(connection, post_migration, engine) 395 | 396 | return True 397 | 398 | 399 | def main(): 400 | # Parse arguments 401 | parser = argparse.ArgumentParser() 402 | parser.add_argument("-c", "--config", type=str, 403 | help="Config file location (default: ~/.dbschema.yml)") 404 | parser.add_argument("-t", "--tag", type=str, help="Database tag") 405 | parser.add_argument("-r", "--rollback", type=str, 406 | help="Rollback a migration") 407 | parser.add_argument("-s", "--skip_missing", action='store_true', 408 | help="Skip missing migration folders") 409 | args = parser.parse_args() 410 | 411 | apply(args.config, args.tag, args.rollback, args.skip_missing) 412 | 413 | 414 | if __name__ == "__main__": 415 | main() 416 | -------------------------------------------------------------------------------- /src/unittest/test_schema_change.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import psycopg2 3 | import pymysql 4 | import datetime 5 | 6 | from .. 
import schema_change 7 | 8 | 9 | class Test(unittest.TestCase): 10 | 11 | config_path = 'src/unittest/utils/config/dbschema.yml' 12 | config_path_empty_db = 'src/unittest/utils/config/dbschema_empty_db.yml' 13 | 14 | def test_get_config(self): 15 | config = schema_change.get_config(self.config_path) 16 | 17 | self.assertIsInstance(config, dict) 18 | 19 | def test_get_config_2(self): 20 | # Test loading from home directory 21 | config = schema_change.get_config() 22 | 23 | self.assertIsInstance(config, dict) 24 | 25 | def test_check_exists(self): 26 | self.assertTrue(schema_change.check_exists( 27 | 'src/unittest/utils/migrations/mysql/one/up.sql')) 28 | self.assertTrue(schema_change.check_exists( 29 | 'src/unittest/utils/migrations/mysql/one/', 'dir')) 30 | 31 | # Check exceptions for non existent 32 | self.assertRaises(RuntimeError, schema_change.check_exists, 33 | 'src/unittest/utils/non_existent') 34 | self.assertRaises(RuntimeError, schema_change.check_exists, 35 | 'src/unittest/utils/non_existent', 'dir') 36 | 37 | def test_get_migrations_files(self): 38 | migration_files = schema_change.get_migrations_files( 39 | 'src/unittest/utils/migrations/mysql/') 40 | 41 | self.assertTrue( 42 | 'src/unittest/utils/migrations/mysql/one/up.sql' in migration_files) 43 | self.assertTrue( 44 | 'src/unittest/utils/migrations/mysql/two/up.sql' in migration_files) 45 | self.assertTrue( 46 | 'src/unittest/utils/migrations/mysql/three/up.sql' in migration_files) 47 | 48 | def test_add_slash(self): 49 | self.assertEqual(schema_change.add_slash('some/path')[-1], '/') 50 | self.assertEqual(schema_change.add_slash('some/path/')[-1], '/') 51 | 52 | def test_get_migration_name(self): 53 | self.assertEqual(schema_change.get_migration_name( 54 | 'src/unittest/utils/migrations/mysql/one/up.sql'), 'one') 55 | 56 | def test_get_migration_source(self): 57 | self.assertEqual(schema_change.get_migration_source( 58 | 'src/unittest/utils/migrations/mysql/one/down.sql').strip(), 'DROP TABLE one;') 59 | 60 | def test_get_connection(self): 61 | config = schema_change.get_config(self.config_path) 62 | databases = config['databases'] 63 | 64 | for tag in databases: 65 | # Set vars 66 | engine = databases[tag].get('engine', 'mysql') 67 | host = databases[tag].get('host', 'localhost') 68 | port = databases[tag].get('port', 3306) 69 | user = databases[tag].get('user') 70 | password = databases[tag].get('password') 71 | db = databases[tag].get('db') 72 | 73 | # Get database connection 74 | connection = schema_change.get_connection( 75 | engine, host, user, port, password, db, schema_change.get_ssl(databases[tag])) 76 | 77 | if engine == 'postgresql': 78 | self.assertIsInstance( 79 | connection, psycopg2.extensions.connection) 80 | else: 81 | self.assertIsInstance( 82 | connection, pymysql.connections.Connection) 83 | 84 | # Test exception for non existing engine 85 | self.assertRaises(RuntimeError, schema_change.get_connection, 86 | 'unknown_engine', None, None, None, None, None) 87 | 88 | def test_get_mysql_connection(self): 89 | config = schema_change.get_config(self.config_path) 90 | database = config['databases']['tag_mysql'] 91 | 92 | # Get database connection 93 | connection = schema_change.get_mysql_connection( 94 | database['host'], database['user'], database['port'], database['password'], database['db'], schema_change.get_ssl(database)) 95 | 96 | self.assertIsInstance( 97 | connection, pymysql.connections.Connection) 98 | 99 | def test_get_pg_connection(self): 100 | config = schema_change.get_config(self.config_path) 
101 | database = config['databases']['tag_postgresql'] 102 | 103 | # Get database connection 104 | connection = schema_change.get_pg_connection( 105 | database['host'], database['user'], database['port'], database['password'], database['db'], schema_change.get_ssl(database)) 106 | 107 | self.assertIsInstance( 108 | connection, psycopg2.extensions.connection) 109 | 110 | def test_parse_statements(self): 111 | """ Test single query """ 112 | 113 | queries = """SELECT 1;""" 114 | parsed = schema_change.parse_statements(queries, engine='mysql') 115 | 116 | assert len(parsed) == 1 117 | assert parsed[0] == 'SELECT 1;' 118 | 119 | def test_parse_statements_2(self): 120 | """ Test single query without delimiter """ 121 | 122 | queries = """SELECT 1""" 123 | parsed = schema_change.parse_statements(queries, engine='mysql') 124 | 125 | assert len(parsed) == 1 126 | assert parsed[0] == 'SELECT 1' 127 | 128 | def test_parse_statements_3(self): 129 | """ Test multiple queries and comment removal """ 130 | 131 | queries = """ 132 | SELECT 1; 133 | -- Some comment 134 | SELECT 2; 135 | """ 136 | parsed = schema_change.parse_statements(queries, engine='mysql') 137 | 138 | assert len(parsed) == 2 139 | assert parsed[0] == 'SELECT 1;' 140 | assert parsed[1] == 'SELECT 2;' 141 | 142 | def test_parse_statements_4(self): 143 | """ Test delimiter change """ 144 | 145 | queries = """ 146 | SELECT 1; 147 | -- Some comment 148 | DELIMITER $$ 149 | SELECT 2$$ 150 | DELIMITER ; 151 | SELECT 3; 152 | """ 153 | parsed = schema_change.parse_statements(queries, engine='mysql') 154 | 155 | assert len(parsed) == 3 156 | assert parsed[0] == 'SELECT 1;' 157 | assert parsed[1] == 'SELECT 2;' 158 | assert parsed[2] == 'SELECT 3;' 159 | 160 | def test_parse_statements_5(self): 161 | """ Test for MySQL stored function """ 162 | 163 | queries = """ 164 | SELECT 1; 165 | 166 | DELIMITER $$ 167 | 168 | CREATE FUNCTION CustomerLevel(p_creditLimit double) RETURNS VARCHAR(10) 169 | BEGIN 170 | RETURN 1; 171 | END$$ 172 | 173 | DELIMITER ; 174 | 175 | SELECT 2; 176 | """ 177 | parsed = schema_change.parse_statements(queries, engine='mysql') 178 | 179 | assert len(parsed) == 3 180 | assert parsed[0] == 'SELECT 1;' 181 | assert parsed[1] == """CREATE FUNCTION CustomerLevel(p_creditLimit double) RETURNS VARCHAR(10) 182 | BEGIN 183 | RETURN 1; 184 | END;""" 185 | assert parsed[2] == 'SELECT 2;' 186 | 187 | def test_parse_statements_6(self): 188 | """ Test for PostgreSQL stored function """ 189 | 190 | queries = """ 191 | SELECT 1; 192 | 193 | CREATE OR REPLACE FUNCTION some function(a int, b bigint) 194 | RETURNS BOOLEAN 195 | LANGUAGE plpgsql 196 | SECURITY DEFINER 197 | AS $$ 198 | BEGIN 199 | UPDATE table 200 | SET something = b 201 | WHERE id = a; 202 | RETURN FOUND; 203 | END; 204 | $$; 205 | 206 | SELECT 2; 207 | """ 208 | parsed = schema_change.parse_statements( 209 | queries, engine='postgresql') 210 | 211 | assert len(parsed) == 3 212 | assert parsed[0] == 'SELECT 1;' 213 | assert parsed[1] == """CREATE OR REPLACE FUNCTION some function(a int, b bigint) 214 | RETURNS BOOLEAN 215 | LANGUAGE plpgsql 216 | SECURITY DEFINER 217 | AS $$ 218 | BEGIN 219 | UPDATE table 220 | SET something = b 221 | WHERE id = a; 222 | RETURN FOUND; 223 | END; 224 | $$;""" 225 | assert parsed[2] == 'SELECT 2;' 226 | 227 | def test_run_migration(self): 228 | config = schema_change.get_config(self.config_path) 229 | database = config['databases']['tag_postgresql'] 230 | 231 | # Get database connection 232 | connection = schema_change.get_pg_connection( 233 
| database['host'], database['user'], database['port'], database['password'], database['db'], schema_change.get_ssl(database)) 234 | 235 | self.assertTrue(schema_change.run_migration( 236 | connection, 'SELECT 1', engine='postgresql')) 237 | 238 | def test_save_migration(self): 239 | config = schema_change.get_config(self.config_path) 240 | database = config['databases']['tag_postgresql'] 241 | 242 | # Get database connection 243 | connection = schema_change.get_pg_connection( 244 | database['host'], database['user'], database['port'], database['password'], database['db'], schema_change.get_ssl(database)) 245 | 246 | self.assertTrue(schema_change.save_migration( 247 | connection, 'some_migration')) 248 | 249 | def test_delete_migration(self): 250 | config = schema_change.get_config(self.config_path) 251 | database = config['databases']['tag_postgresql'] 252 | 253 | # Get database connection 254 | connection = schema_change.get_pg_connection( 255 | database['host'], database['user'], database['port'], database['password'], database['db'], schema_change.get_ssl(database)) 256 | 257 | self.assertTrue(schema_change.delete_migration( 258 | connection, 'some_migration')) 259 | 260 | def test_is_applied(self): 261 | migrations_applied = [ 262 | { 263 | 'name': 'one' 264 | }, 265 | { 266 | 'name': 'two' 267 | }, 268 | { 269 | 'name': 'three' 270 | }, 271 | ] 272 | 273 | self.assertTrue(schema_change.is_applied(migrations_applied, 'three')) 274 | self.assertFalse(schema_change.is_applied(migrations_applied, 'four')) 275 | 276 | def test_get_migrations_applied(self): 277 | config = schema_change.get_config(self.config_path) 278 | database = config['databases']['tag_postgresql'] 279 | 280 | # Get database connection 281 | connection = schema_change.get_pg_connection( 282 | database['host'], database['user'], database['port'], database['password'], database['db'], schema_change.get_ssl(database)) 283 | 284 | # Add fake migrations 285 | schema_change.save_migration(connection, 'some_migration') 286 | schema_change.save_migration(connection, 'some_migration_2') 287 | 288 | migrations_applied = schema_change.get_migrations_applied( 289 | database['engine'], connection) 290 | 291 | for migration in migrations_applied: 292 | self.assertIsInstance(migration['id'], int) 293 | self.assertIsInstance(migration['name'], str) 294 | self.assertIsInstance(migration['date'], datetime.datetime) 295 | 296 | # Delete fake migrations 297 | schema_change.delete_migration(connection, 'some_migration') 298 | schema_change.delete_migration(connection, 'some_migration_2') 299 | 300 | def test_get_migrations_applied_2(self): 301 | # Only loading empty databases to trigger an exception 302 | config = schema_change.get_config(self.config_path_empty_db) 303 | 304 | for tag in config['databases']: 305 | database = config['databases'][tag] 306 | 307 | # Get database connection 308 | connection = schema_change.get_connection( 309 | database['engine'], database['host'], database['user'], database['port'], database['password'], database['db'], schema_change.get_ssl(database)) 310 | 311 | # Test exception for non existing engine 312 | self.assertRaises(RuntimeError, schema_change.get_migrations_applied, 313 | database['engine'], connection) 314 | 315 | def test_apply_migrations(self): 316 | config = schema_change.get_config(self.config_path) 317 | database = config['databases']['tag_postgresql'] 318 | 319 | # Get database connection 320 | connection = schema_change.get_pg_connection( 321 | database['host'], database['user'], 
database['port'], database['password'], database['db'], schema_change.get_ssl(database)) 322 | 323 | self.assertTrue(schema_change.apply_migrations( 324 | database['engine'], connection, database['path'])) 325 | 326 | def test_rollback_migration(self): 327 | config = schema_change.get_config(self.config_path) 328 | database = config['databases']['tag_postgresql'] 329 | 330 | # Get database connection 331 | connection = schema_change.get_pg_connection( 332 | database['host'], database['user'], database['port'], database['password'], database['db'], schema_change.get_ssl(database)) 333 | 334 | self.assertTrue(schema_change.rollback_migration( 335 | database['engine'], connection, database['path'], 'one')) 336 | 337 | # Test exception for non existing engine 338 | self.assertRaises(RuntimeError, schema_change.rollback_migration, 339 | database['engine'], connection, database['path'], 'non_existent') 340 | 341 | def test_get_ssl(self): 342 | config = schema_change.get_config(self.config_path) 343 | 344 | for tag in config['databases']: 345 | database = config['databases'][tag] 346 | 347 | self.assertIsInstance(schema_change.get_ssl(database), dict) 348 | 349 | def test_apply(self): 350 | self.assertTrue(schema_change.apply(config_override=self.config_path, 351 | skip_missing=True)) 352 | self.assertTrue(schema_change.apply(config_override=self.config_path, 353 | tag_override='tag_mysql')) 354 | self.assertTrue(schema_change.apply(config_override=self.config_path, 355 | tag_override='tag_mysql', 356 | rollback='one')) 357 | self.assertTrue(schema_change.apply(config_override=self.config_path, 358 | skip_missing=True)) 359 | 360 | # Test exception for rollback without a tag 361 | self.assertRaises(RuntimeError, schema_change.apply, 362 | self.config_path, None, 'one') 363 | 364 | 365 | if __name__ == '__main__': 366 | unittest.main() 367 | --------------------------------------------------------------------------------