├── test_app ├── __init__.py ├── models.py ├── wsgi.py ├── migrations │ └── 001_initial.py ├── manage.py └── settings.py ├── Dockerfile ├── zero_downtime_migrations ├── __init__.py └── backend │ ├── __init__.py │ ├── exceptions.py │ ├── base.py │ ├── sql_template.py │ └── schema.py ├── run_tests.sh ├── .gitignore ├── setup.cfg ├── .travis.yml ├── pytest.ini ├── MANIFEST.in ├── docker-compose.yaml ├── AUTHORS ├── tests ├── conftest.py ├── test_add_unique.py ├── test_create_index.py ├── test_schema.py └── test_add_field.py ├── tox.ini ├── LICENSE ├── CONTRIBUTING.md ├── setup.py ├── changelog.md └── README.rst /test_app/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM themattrix/tox 2 | -------------------------------------------------------------------------------- /zero_downtime_migrations/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /run_tests.sh: -------------------------------------------------------------------------------- 1 | docker-compose run tox 2 | -------------------------------------------------------------------------------- /zero_downtime_migrations/backend/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.swp 3 | *.egg-info 4 | .idea 5 | .cache 6 | .tox 7 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | 
universal = 1 3 | 4 | [metadata] 5 | license_file = LICENSE 6 | -------------------------------------------------------------------------------- /test_app/models.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | 3 | 4 | class TestModel(models.Model): 5 | name = models.CharField(max_length=250) 6 | -------------------------------------------------------------------------------- /zero_downtime_migrations/backend/exceptions.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | from __future__ import unicode_literals 4 | 5 | 6 | class InvalidIndexError(ValueError): 7 | pass 8 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: required 2 | 3 | language: python 4 | 5 | services: 6 | - docker 7 | 8 | script: 9 | - "travis_wait 30 sleep 1800 &" 10 | - docker-compose run tox 11 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | addopts=-l -q --boxed 3 | pep8ignore = E501 4 | norecursedirs = .robe .idea 5 | python_files = tests.py test_*.py 6 | DJANGO_SETTINGS_MODULE = test_app.settings 7 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHORS 2 | include README.rst 3 | include LICENSE 4 | 5 | recursive-include tests * 6 | recursive-exclude .tox * 7 | recursive-exclude .cache * 8 | recursive-exclude .git * 9 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | 3 | services: 4 | db: 
5 | image: postgres 6 | tox: 7 | build: . 8 | depends_on: 9 | - db 10 | volumes: 11 | - ".:/src:ro" 12 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | 2 | The following authors have created the source code of "zero-downtime-migrations" published and distributed by YANDEX LLC as the owner: 3 | 4 | Vladimir Koljasinskij 5 | naohide <57.x.mas@gmail.com> 6 | leiserfg 7 | shigarus 8 | shadchin 9 | -------------------------------------------------------------------------------- /test_app/wsgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | WSGI config for mysite project. 3 | 4 | It exposes the WSGI callable as a module-level variable named ``application``. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/ 8 | """ 9 | 10 | import os 11 | 12 | from django.core.wsgi import get_wsgi_application 13 | 14 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_app.settings") 15 | 16 | application = get_wsgi_application() 17 | -------------------------------------------------------------------------------- /zero_downtime_migrations/backend/base.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | from __future__ import unicode_literals 4 | 5 | try: 6 | from django.db.backends.postgresql.base import DatabaseWrapper as BaseWrapper 7 | except ImportError: 8 | from django.db.backends.postgresql_psycopg2.base import DatabaseWrapper as BaseWrapper 9 | 10 | from .schema import DatabaseSchemaEditor 11 | 12 | 13 | class DatabaseWrapper(BaseWrapper): 14 | SchemaEditorClass = DatabaseSchemaEditor 15 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | # coding: 
utf-8 2 | 3 | from __future__ import unicode_literals 4 | 5 | import pytest 6 | 7 | from test_app.models import TestModel 8 | 9 | 10 | @pytest.fixture 11 | def test_object(): 12 | return TestModel.objects.create(name='some name') 13 | 14 | 15 | @pytest.fixture 16 | def test_object_two(): 17 | return TestModel.objects.create(name='some other name') 18 | 19 | 20 | @pytest.fixture 21 | def test_object_three(): 22 | return TestModel.objects.create(name='some different name') 23 | -------------------------------------------------------------------------------- /test_app/migrations/001_initial.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by Django 1.11.1 on 2017-07-20 10:14 3 | from __future__ import unicode_literals 4 | 5 | from django.db import migrations, models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | initial = True 11 | 12 | dependencies = [ 13 | ] 14 | 15 | operations = [ 16 | migrations.CreateModel( 17 | name='TestClass', 18 | fields=[ 19 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 20 | ('name', models.CharField(max_length=250)), 21 | ], 22 | options={ 23 | 'abstract': False, 24 | }, 25 | ), 26 | ] 27 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = {py27}-{django18,django19,django110,django111},{py34}-{django18,django19,django110,django111,django20},{py35,py36}-{django18,django19,django110,django111,django20,django21,django22} 3 | skipsdist = {env:TOXBUILD:false} 4 | 5 | [testenv] 6 | sitepackages = False 7 | deps= 8 | pytest==3.8.0 9 | pytest-xdist==1.23.1 10 | pytest-capturelog==0.7 11 | pytest-django==3.4.2 12 | psycopg2==2.7.3.2 13 | pytest-pep8==1.0.6 14 | freezegun==0.3.9 15 | pytz==2017.3 16 | mock==2.0.0 17 | 18 | django18: Django>=1.8,<1.9 19 | 
django19: Django>=1.9,<1.10 20 | django110: Django>=1.10,<1.11 21 | django111: Django>=1.11,<2.0 22 | django20: Django>=2.0,<2.1 23 | django21: Django>=2.1,<2.2 24 | django22: Django>=2.2,<2.3 25 | commands = {env:TOXBUILD:py.test} tests {posargs} 26 | -------------------------------------------------------------------------------- /test_app/manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | 5 | if __name__ == "__main__": 6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_app.settings") 7 | try: 8 | from django.core.management import execute_from_command_line 9 | except ImportError: 10 | # The above import may fail for some other reason. Ensure that the 11 | # issue is really that Django is missing to avoid masking other 12 | # exceptions on Python 2. 13 | try: 14 | import django 15 | except ImportError: 16 | raise ImportError( 17 | "Couldn't import Django. Are you sure it's installed and " 18 | "available on your PYTHONPATH environment variable? Did you " 19 | "forget to activate a virtual environment?" 
20 | ) 21 | raise 22 | execute_from_command_line(sys.argv) 23 | -------------------------------------------------------------------------------- /zero_downtime_migrations/backend/sql_template.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | from __future__ import unicode_literals 4 | 5 | 6 | SQL_ESTIMATE_COUNT_IN_TABLE = "SELECT reltuples::BIGINT FROM pg_class WHERE relname = '%(table)s';" 7 | 8 | SQL_COUNT_IN_TABLE = "SELECT COUNT(*) FROM %(table)s;" 9 | 10 | SQL_COUNT_IN_TABLE_WITH_NULL = "SELECT COUNT(*) FROM %(table)s WHERE %(column)s is NULL;" 11 | 12 | SQL_UPDATE_BATCH = ("WITH cte AS ( " 13 | "SELECT %(pk_column_name)s as pk " 14 | "FROM %(table)s " 15 | "WHERE %(column)s is null " 16 | "LIMIT %(batch_size)s " 17 | ") " 18 | "UPDATE %(table)s table_ " 19 | "SET %(column)s = %(value)s " 20 | "FROM cte " 21 | "WHERE table_.%(pk_column_name)s = cte.pk" 22 | ) 23 | 24 | SQL_CHECK_COLUMN_STATUS = ("SELECT IS_NULLABLE, DATA_TYPE, COLUMN_DEFAULT from information_schema.columns " 25 | "where table_name = '%(table)s' and column_name = '%(column)s';") 26 | 27 | 28 | SQL_CREATE_UNIQUE_INDEX = "CREATE UNIQUE INDEX CONCURRENTLY %(name)s ON %(table)s (%(columns)s)%(extra)s" 29 | SQL_ADD_UNIQUE_CONSTRAINT_FROM_INDEX = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s UNIQUE USING INDEX %(index_name)s" 30 | SQL_CHECK_INDEX_STATUS = ("SELECT 1 FROM pg_class, pg_index WHERE pg_index.indisvalid = false " 31 | "AND pg_index.indexrelid = pg_class.oid and pg_class.relname = '%(index_name)s'") 32 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | zero-downtime-migrations is released under the BSD (3-clause) license 2 | ---------------------------------------------------------- 3 | Copyright (c) 2018, YANDEX LLC 4 | All rights reserved. 
5 | 6 | Redistribution and use in source and binary forms, with or without modification, are permitted provided 7 | that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and 10 | the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and 13 | the following disclaimer in the documentation and/or other materials provided with the distribution. 14 | 15 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote 16 | products derived from this software without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, 19 | INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 20 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 22 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 23 | WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 24 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Notice to external contributors 2 | 3 | 4 | ## General info 5 | 6 | Hello! In order for us (YANDEX LLC) to accept patches and other contributions from you, you will have to adopt our Yandex Contributor License Agreement (the “**CLA**”). 
we may freely merge your contributions with our corresponding open source project and use it further in accordance with the terms and conditions of the CLA.
36 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import codecs 4 | import os 5 | 6 | from setuptools import setup, find_packages 7 | 8 | 9 | # Utility function to read the README file. 10 | # Used for the long_description. It's nice, because now 1) we have a top level 11 | # README file and 2) it's easier to type in the README file than to put a raw 12 | # string in below ... 13 | def read(fname): 14 | file_path = os.path.join(os.path.dirname(__file__), fname) 15 | return codecs.open(file_path, encoding='utf-8').read() 16 | 17 | 18 | setup( 19 | name='zero-downtime-migrations', 20 | version='0.12', 21 | author='Vladimir Koljasinskij', 22 | author_email='smosker@yandex-team.ru', 23 | license='BSD-3-Clause', 24 | url='https://github.com/yandex/zero-downtime-migrations', 25 | description='django migrations without long locks', 26 | long_description=read('README.rst'), 27 | classifiers=['Development Status :: 4 - Beta', 28 | 'Framework :: Django', 29 | 'Framework :: Django :: 1.8', 30 | 'Framework :: Django :: 1.9', 31 | 'Framework :: Django :: 1.10', 32 | 'Framework :: Django :: 1.11', 33 | 'Framework :: Django :: 2.0', 34 | 'Framework :: Django :: 2.1', 35 | 'Intended Audience :: Developers', 36 | 'License :: OSI Approved :: BSD License', 37 | 'Operating System :: OS Independent', 38 | 'Topic :: Software Development :: Libraries :: Python Modules', 39 | 'Programming Language :: Python', 40 | 'Programming Language :: Python :: 2.7', 41 | 'Programming Language :: Python :: 3.4', 42 | 'Programming Language :: Python :: 3.5', 43 | 'Programming Language :: Python :: 3.6', 44 | ], 45 | keywords='django postgresql migrations', 46 | packages=find_packages(), 47 | python_requires='>=2.7,!=3.1.*,!=3.0.*,!=3.2.*,!=3.3.*,<4.0', 48 | install_requires=[ 49 | 'Django>=1.3', 50 | 'psycopg2>=2.7.3.2', 51 | 'packaging', 52 
* Remove usage of deprecated inspect.getargspec if possible (#24)
https://github.com/Smosker/zero-downtime-migrations/commit/1a8e752 ] 72 | * Update README.rst [ https://github.com/Smosker/zero-downtime-migrations/commit/68042f0 ] 73 | * new python versions support (#7) [ https://github.com/Smosker/zero-downtime-migrations/commit/8ea8a21 ] 74 | 75 | [Vladimir Koljasinskij](smosker@gmail.com) 2018-01-25 11:45:34+03:00 76 | 77 | 0.1 78 | --- 79 | * initial release 80 | 81 | [Vladimir Koljasinskij](smosker@gmail.com) 2018-01-17 14:16:27+03:00 82 | -------------------------------------------------------------------------------- /tests/test_add_unique.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | from __future__ import unicode_literals 4 | 5 | import pytest 6 | import django 7 | import re 8 | 9 | from distutils.version import StrictVersion 10 | from django.db import models 11 | from django.db import connections 12 | from django.test.utils import CaptureQueriesContext 13 | 14 | from zero_downtime_migrations.backend.schema import DatabaseSchemaEditor 15 | from test_app.models import TestModel 16 | 17 | connection = connections['default'] 18 | schema_editor = DatabaseSchemaEditor 19 | DJANGO_VERISON = StrictVersion(django.get_version()) 20 | 21 | 22 | @pytest.mark.django_db(transaction=True) 23 | def test_add_unique_correct_queries(): 24 | old_field = models.IntegerField() 25 | old_field.set_attributes_from_name("name") 26 | 27 | field = models.IntegerField(unique=True) 28 | field.set_attributes_from_name("name") 29 | if DJANGO_VERISON >= StrictVersion('2.1'): 30 | index_pattern = r'CREATE UNIQUE INDEX CONCURRENTLY "test_app_testmodel_name_\w+(_uniq)?" 
ON "test_app_testmodel" \("name"\)' 31 | check_index_pattern = r"SELECT 1 FROM pg_class, pg_index WHERE pg_index.indisvalid = false AND pg_index.indexrelid = pg_class.oid and pg_class.relname = 'test_app_testmodel_name_\w+(_uniq)?'" 32 | constraint_pattern = r'ALTER TABLE test_app_testmodel ADD CONSTRAINT test_app_testmodel_name_\w+(_uniq)? UNIQUE USING INDEX test_app_testmodel_name_\w+(_uniq)?' 33 | expected_queries = 3 34 | else: 35 | index_pattern = r'ALTER TABLE "test_app_testmodel" ADD CONSTRAINT "?test_app_testmodel_name_\w+(_uniq)?"? UNIQUE \("name"\)' 36 | check_index_pattern = None 37 | constraint_pattern = None 38 | expected_queries = 1 39 | with CaptureQueriesContext(connection) as ctx, schema_editor(connection=connection) as editor: 40 | editor.alter_field(TestModel, old_field, field) 41 | assert len(ctx.captured_queries) == expected_queries 42 | assert re.search(index_pattern, ctx.captured_queries[0]['sql']) is not None 43 | if check_index_pattern: 44 | assert re.search(check_index_pattern, ctx.captured_queries[1]['sql']) is not None 45 | if constraint_pattern: 46 | assert re.search(constraint_pattern, ctx.captured_queries[2]['sql']) is not None 47 | 48 | 49 | @pytest.mark.django_db(transaction=True) 50 | def test_add_unique_correct_result(): 51 | old_field = models.IntegerField() 52 | old_field.set_attributes_from_name("name") 53 | 54 | field = models.IntegerField(unique=True) 55 | field.set_attributes_from_name("name") 56 | with schema_editor(connection=connection) as editor: 57 | editor.alter_field(TestModel, old_field, field) 58 | TestModel.objects.create(name='test') 59 | TestModel.objects.create(name='smth') 60 | with pytest.raises(django.db.utils.IntegrityError): 61 | TestModel.objects.create(name='test') 62 | -------------------------------------------------------------------------------- /test_app/settings.py: -------------------------------------------------------------------------------- 1 | """ 2 | Django settings for mysite project. 
3 | 4 | Generated by 'django-admin startproject' using Django 1.11.7. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/1.11/topics/settings/ 8 | 9 | For the full list of settings and their values, see 10 | https://docs.djangoproject.com/en/1.11/ref/settings/ 11 | """ 12 | 13 | import os 14 | 15 | # Build paths inside the project like this: os.path.join(BASE_DIR, ...) 16 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 17 | 18 | 19 | # Quick-start development settings - unsuitable for production 20 | # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ 21 | 22 | # SECURITY WARNING: keep the secret key used in production secret! 23 | SECRET_KEY = 'tkl@ki*y@8*yma%x67(sa^tj()-yax138#&n^_@3!x0q*fhxj9' 24 | 25 | # SECURITY WARNING: don't run with debug turned on in production! 26 | DEBUG = True 27 | 28 | ALLOWED_HOSTS = [] 29 | 30 | 31 | # Application definition 32 | 33 | INSTALLED_APPS = [ 34 | 'django.contrib.admin', 35 | 'django.contrib.auth', 36 | 'django.contrib.contenttypes', 37 | 'django.contrib.sessions', 38 | 'django.contrib.messages', 39 | 'django.contrib.staticfiles', 40 | 'test_app', 41 | ] 42 | 43 | MIDDLEWARE = [ 44 | 'django.middleware.security.SecurityMiddleware', 45 | 'django.contrib.sessions.middleware.SessionMiddleware', 46 | 'django.middleware.common.CommonMiddleware', 47 | 'django.middleware.csrf.CsrfViewMiddleware', 48 | 'django.contrib.auth.middleware.AuthenticationMiddleware', 49 | 'django.contrib.messages.middleware.MessageMiddleware', 50 | 'django.middleware.clickjacking.XFrameOptionsMiddleware', 51 | ] 52 | 53 | ROOT_URLCONF = 'test_app.urls' 54 | 55 | TEMPLATES = [ 56 | { 57 | 'BACKEND': 'django.template.backends.django.DjangoTemplates', 58 | 'DIRS': [], 59 | 'APP_DIRS': True, 60 | 'OPTIONS': { 61 | 'context_processors': [ 62 | 'django.template.context_processors.debug', 63 | 'django.template.context_processors.request', 64 | 
'django.contrib.auth.context_processors.auth', 65 | 'django.contrib.messages.context_processors.messages', 66 | ], 67 | }, 68 | }, 69 | ] 70 | 71 | WSGI_APPLICATION = 'test_app.wsgi.application' 72 | 73 | 74 | # Database 75 | # https://docs.djangoproject.com/en/1.11/ref/settings/#databases 76 | 77 | DATABASES = { 78 | 'default': { 79 | 'ENGINE': 'django.db.backends.postgresql_psycopg2', 80 | 'NAME': 'postgres', 81 | 'USER': 'postgres', 82 | 'HOST': 'db', 83 | 'PORT': '5432', 84 | }, 85 | } 86 | 87 | 88 | # Password validation 89 | # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators 90 | 91 | AUTH_PASSWORD_VALIDATORS = [ 92 | { 93 | 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', 94 | }, 95 | { 96 | 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 97 | }, 98 | { 99 | 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', 100 | }, 101 | { 102 | 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', 103 | }, 104 | ] 105 | 106 | 107 | # Internationalization 108 | # https://docs.djangoproject.com/en/1.11/topics/i18n/ 109 | 110 | LANGUAGE_CODE = 'en-us' 111 | 112 | TIME_ZONE = 'UTC' 113 | 114 | USE_I18N = True 115 | 116 | USE_L10N = True 117 | 118 | USE_TZ = True 119 | 120 | 121 | # Static files (CSS, JavaScript, Images) 122 | # https://docs.djangoproject.com/en/1.11/howto/static-files/ 123 | 124 | STATIC_URL = '/static/' 125 | -------------------------------------------------------------------------------- /tests/test_create_index.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | from __future__ import unicode_literals 4 | 5 | import pytest 6 | import re 7 | import django 8 | 9 | from django.db import models 10 | from django.db import connections 11 | from django.test.utils import CaptureQueriesContext 12 | from distutils.version import StrictVersion 13 | 14 | from 
zero_downtime_migrations.backend.schema import DatabaseSchemaEditor 15 | from zero_downtime_migrations.backend.exceptions import InvalidIndexError 16 | from test_app.models import TestModel 17 | 18 | connection = connections['default'] 19 | schema_editor = DatabaseSchemaEditor 20 | DJANGO_VERISON = StrictVersion(django.get_version()) 21 | 22 | 23 | @pytest.mark.django_db(transaction=True) 24 | def test_create_index_success(): 25 | TestModel.objects.all().delete() 26 | old_field = models.IntegerField() 27 | old_field.set_attributes_from_name("name") 28 | 29 | field = models.IntegerField(db_index=True) 30 | field.set_attributes_from_name("name") 31 | pattern = r'CREATE INDEX CONCURRENTLY "test_app_testmodel_name_\w+(_uniq)?" ON "test_app_testmodel" \("name"\)' 32 | search_pattern = r"SELECT 1 FROM pg_class, pg_index WHERE pg_index.indisvalid = false AND pg_index.indexrelid = pg_class.oid and pg_class.relname = 'test_app_testmodel_name_\w+(_uniq)?'" 33 | with CaptureQueriesContext(connection) as ctx, schema_editor(connection=connection) as editor: 34 | editor.alter_field(TestModel, old_field, field) 35 | assert len(ctx.captured_queries) == 2 36 | assert re.search(pattern, ctx.captured_queries[0]['sql']) is not None 37 | assert re.search(search_pattern, ctx.captured_queries[1]['sql']) is not None 38 | 39 | 40 | @pytest.mark.django_db(transaction=True) 41 | def test_sqlmigrate_create_index_working(): 42 | TestModel.objects.all().delete() 43 | old_field = models.IntegerField() 44 | old_field.set_attributes_from_name("name") 45 | 46 | field = models.IntegerField(db_index=True) 47 | field.set_attributes_from_name("name") 48 | pattern = r'CREATE INDEX CONCURRENTLY "test_app_testmodel_name_\w+(_uniq)?" 
ON "test_app_testmodel" \("name"\)' 49 | with schema_editor(connection=connection, collect_sql=True) as editor: 50 | editor.alter_field(TestModel, old_field, field) 51 | assert len(editor.collected_sql) == 1 52 | assert re.search(pattern, editor.collected_sql[0]) is not None 53 | 54 | 55 | @pytest.mark.django_db(transaction=True) 56 | def test_create_index_fail(): 57 | TestModel.objects.create(name='test_unique') 58 | TestModel.objects.create(name='test_unique') 59 | 60 | old_field = models.IntegerField() 61 | old_field.set_attributes_from_name("name") 62 | 63 | field = models.IntegerField(unique=True) 64 | field.set_attributes_from_name("name") 65 | 66 | if DJANGO_VERISON >= StrictVersion('2.1'): 67 | create_pattern = r'CREATE UNIQUE INDEX CONCURRENTLY "test_app_testmodel_name_\w+(_uniq)?" ON "test_app_testmodel" \("name"\)' 68 | search_pattern = r"SELECT 1 FROM pg_class, pg_index WHERE pg_index.indisvalid = false AND pg_index.indexrelid = pg_class.oid and pg_class.relname = 'test_app_testmodel_name_\w+(_uniq)?'" 69 | drop_pattern = r"DROP INDEX CONCURRENTLY IF EXISTS test_app_testmodel_name_\w+(_uniq)?" 70 | with CaptureQueriesContext(connection) as ctx, schema_editor(connection=connection) as editor: 71 | with pytest.raises(InvalidIndexError): 72 | editor.alter_field(TestModel, old_field, field) 73 | assert len(ctx.captured_queries) == 3 74 | assert re.search(create_pattern, ctx.captured_queries[0]['sql']) is not None 75 | assert re.search(search_pattern, ctx.captured_queries[1]['sql']) is not None 76 | assert re.search(drop_pattern, ctx.captured_queries[2]['sql']) is not None 77 | else: 78 | with CaptureQueriesContext(connection) as ctx, schema_editor(connection=connection) as editor: 79 | index_pattern = r'ALTER TABLE "test_app_testmodel" ADD CONSTRAINT "?test_app_testmodel_name_\w+(_uniq)?"? 
UNIQUE \("name"\)' 80 | with pytest.raises(django.db.utils.IntegrityError): 81 | editor.alter_field(TestModel, old_field, field) 82 | assert len(ctx.captured_queries) == 1 83 | assert re.search(index_pattern, ctx.captured_queries[0]['sql']) is not None 84 | -------------------------------------------------------------------------------- /tests/test_schema.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | from __future__ import unicode_literals 4 | 5 | import pytest 6 | from mock import patch, call 7 | 8 | from django.db import models 9 | from django.db import connections 10 | from django.db.migrations.questioner import InteractiveMigrationQuestioner 11 | from django.test.utils import CaptureQueriesContext 12 | 13 | from zero_downtime_migrations.backend.schema import DatabaseSchemaEditor 14 | from test_app.models import TestModel 15 | 16 | pytestmark = pytest.mark.django_db 17 | connection = connections['default'] 18 | schema_editor = DatabaseSchemaEditor 19 | 20 | 21 | @pytest.fixture 22 | def add_column(): 23 | sql = 'ALTER TABLE "test_app_testmodel" ADD COLUMN "bool_field" BOOLEAN NULL;' 24 | with connection.cursor() as cursor: 25 | cursor.execute(sql, ()) 26 | 27 | 28 | def base_questioner_test(choice_return): 29 | field = models.BooleanField(default=True) 30 | field.set_attributes_from_name("bool_field") 31 | with CaptureQueriesContext(connection) as ctx: 32 | with patch.object(InteractiveMigrationQuestioner, '_choice_input') as choice_mock: 33 | with schema_editor(connection=connection) as editor: 34 | choice_mock.return_value = choice_return 35 | editor.add_field(TestModel, field) 36 | 37 | queries = [query_data['sql'] for query_data in ctx.captured_queries 38 | if 'test_app' in query_data['sql']] 39 | return choice_mock, queries 40 | 41 | 42 | def test_retry_with_exit_working(add_column): 43 | with pytest.raises(SystemExit): 44 | base_questioner_test(1) 45 | 46 | 47 | def 
test_retry_with_drop_working(add_column): 48 | _, queries = base_questioner_test(2) 49 | assert queries == [("SELECT IS_NULLABLE, DATA_TYPE, COLUMN_DEFAULT from information_schema.columns " 50 | "where table_name = 'test_app_testmodel' and column_name = 'bool_field';"), 51 | 'ALTER TABLE "test_app_testmodel" DROP COLUMN "bool_field" CASCADE', 52 | 'ALTER TABLE "test_app_testmodel" ADD COLUMN "bool_field" boolean NULL', 53 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN "bool_field" SET DEFAULT true', 54 | "SELECT reltuples::BIGINT FROM pg_class WHERE relname = 'test_app_testmodel';", 55 | 'SELECT COUNT(*) FROM test_app_testmodel;', 56 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN "bool_field" SET NOT NULL', 57 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN "bool_field" DROP DEFAULT', 58 | ] 59 | 60 | 61 | def test_retry_with_choice_working(add_column): 62 | choice_mock, queries = base_questioner_test(3) 63 | calls = [call(('It look like column "bool_field" in table ' 64 | '"test_app_testmodel" already exist with ' 65 | 'following parameters: TYPE: "boolean", ' 66 | 'DEFAULT: "None", NULLABLE: "YES".'), 67 | ('abort migration', u'drop column and run migration from beginning', 68 | 'manually choose action to start from', 69 | 'show how many rows still need to be updated', 70 | 'mark operation as successful and proceed to next operation', 71 | 'drop column and run migration from standard SchemaEditor', 72 | ), 73 | ), 74 | call('Now choose from which action process should continue', 75 | ['add field with default', 76 | 'update existing rows', 77 | 'set not null for field', 78 | 'drop default', 79 | ]), 80 | ] 81 | choice_mock.assert_has_calls(calls) 82 | assert queries == [("SELECT IS_NULLABLE, DATA_TYPE, COLUMN_DEFAULT from information_schema.columns where " 83 | "table_name = 'test_app_testmodel' and column_name = 'bool_field';"), 84 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN "bool_field" SET NOT NULL', 85 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN 
"bool_field" DROP DEFAULT', 86 | ] 87 | 88 | 89 | def test_retry_with_skip_working(add_column): 90 | choice_mock, queries = base_questioner_test(5) 91 | choice_mock.assert_called_once_with(('It look like column "bool_field" in table ' 92 | '"test_app_testmodel" already exist with ' 93 | 'following parameters: TYPE: "boolean", ' 94 | 'DEFAULT: "None", NULLABLE: "YES".'), 95 | ('abort migration', u'drop column and run migration from beginning', 96 | 'manually choose action to start from', 97 | 'show how many rows still need to be updated', 98 | 'mark operation as successful and proceed to next operation', 99 | 'drop column and run migration from standard SchemaEditor', 100 | ), 101 | ) 102 | assert len(queries) == 1 103 | assert queries[0] == ("SELECT IS_NULLABLE, DATA_TYPE, COLUMN_DEFAULT from information_schema.columns " 104 | "where table_name = 'test_app_testmodel' and column_name = 'bool_field';") 105 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. image:: https://img.shields.io/pypi/v/zero-downtime-migrations.svg?style=flat 2 | :alt: PyPI Version 3 | :target: https://pypi.python.org/pypi/zero-downtime-migrations 4 | 5 | .. image:: https://img.shields.io/pypi/pyversions/zero-downtime-migrations.svg 6 | :alt: Supported Python versions 7 | :target: https://pypi.python.org/pypi/zero-downtime-migrations 8 | 9 | .. image:: https://travis-ci.org/yandex/zero-downtime-migrations.svg?branch=master 10 | :alt: Build Status 11 | :target: https://travis-ci.org/yandex/zero-downtime-migrations 12 | 13 | 14 | Zero-Downtime-Migrations 15 | ======================== 16 | 17 | Description 18 | ----------- 19 | *Zero-Downtime-Migrations (ZDM)* -- this is application which allow you to avoid long locks (and rewriting the whole table) 20 | while applying Django migrations using PostgreSql as database. 
21 | 22 | Current possibilities 23 | --------------------- 24 | * add field with default value (nullable or not) 25 | * create index concurrently (and check index status after creation in case it was created with INVALID status) 26 | * add unique property to existing field through creating unique index concurrently and creating constraint using this index 27 | 28 | Why use it 29 | ---------- 30 | We faced the following problem - performing some django migrations (such as adding a column with a default value) locks the table on 31 | read/write, so it's impossible for our services to work properly during these periods. It can be acceptable on rather small 32 | tables (less than a million rows), but even on them it can be painful if the service is highly loaded. 33 | But we have a lot of tables with more than 50 million rows, and applying migrations on such a table locks it for 34 | more than an hour, which is totally unacceptable. Also, during these time-consuming operations, migrations rather often fail 35 | because of different errors (such as TimeoutError) and we have to start them from scratch or run the sql manually through 36 | psql and then fake the migration. 37 | 38 | So in the end we had the idea of writing this package so it can prevent long locks on a table and also 39 | provide a more stable migration process which can be continued if an operation fails for some reason. 40 | 41 | Installation 42 | ------------ 43 | To install :code:`ZDM`, simply run: 44 | 45 | .. code:: bash 46 | 47 | pip install zero-downtime-migrations 48 | 49 | Usage 50 | ----- 51 | If you are currently using the default postgresql backend change it to: 52 | 53 | .. code:: python 54 | 55 | DATABASES = { 56 | 'default': { 57 | 'ENGINE': 'zero_downtime_migrations.backend', 58 | ... 59 | } 60 | ... 61 | } 62 | 63 | 64 | If you are using your own custom backend you can: 65 | 66 | * Set :code:`SchemaEditorClass` if you are currently using default one: 67 | 68 | ..
code:: python 69 | 70 | from zero_downtime_migrations.backend.schema import DatabaseSchemaEditor 71 | 72 | class DatabaseWrapper(BaseWrapper): 73 | SchemaEditorClass = DatabaseSchemaEditor 74 | 75 | 76 | * Add :code:`ZeroDownTimeMixin` to base classes of your :code:`DatabaseSchemaEditor` if you are using custom one: 77 | 78 | .. code:: python 79 | 80 | from zero_downtime_migrations.backend.schema import ZeroDownTimeMixin 81 | 82 | class YourCustomSchemaEditor(ZeroDownTimeMixin, ...): 83 | ... 84 | 85 | Note about indexes 86 | ------------------ 87 | Library will always force CONCURRENTLY index creation and after that check index status - if index was 88 | created with INVALID status it will be deleted and error will be raised. 89 | In this case you should fix problem if needed and restart migration. 90 | For example if creating unique index was failed you should make sure that there are only unique values 91 | in column on which index is creating. 92 | Usually index creating with invalid status due to deadlock so you need just restart migration. 93 | 94 | Example 95 | ------- 96 | When adding not null column with default django will perform such sql query: 97 | 98 | .. code:: sql 99 | 100 | ALTER TABLE "test" ADD COLUMN "field" boolean DEFAULT True NOT NULL; 101 | 102 | Which cause postgres to rewrite the whole table and when swap it with existing one (`note from django documentation `_) 103 | and during this period it will hold exclusive lock on write/read on this table. 104 | 105 | This package will break sql above in separate commands not only to prevent the rewriting of whole 106 | table but also to add column with as small lock times as possible. 107 | 108 | First of all we will add nullable column without default and add default value to it in separate command in one transaction: 109 | 110 | .. 
code:: sql 111 | 112 | ALTER TABLE "test" ADD COLUMN "field" boolean NULL; 113 | ALTER TABLE "test" ALTER COLUMN "field" SET DEFAULT true; 114 | 115 | This will add a default for all new rows in the table, but all existing ones will have a null value in this column for now; 116 | this operation will be quick because postgres doesn't have to fill all existing rows with the default. 117 | 118 | Next we will count objects in the table and if the result is more than zero - calculate the 119 | size of the batch in which we will update existing rows. After that, while there are still objects with null in this 120 | column - we will update them. 121 | 122 | While the result of the following statement is more than zero: 123 | 124 | .. code:: sql 125 | 126 | WITH cte AS ( 127 | SELECT <pk_column_name> as pk 128 | FROM "test" 129 | WHERE "field" is null 130 | LIMIT <batch_size> 131 | ) 132 | UPDATE "test" table_ 133 | SET "field" = true 134 | FROM cte 135 | WHERE table_.<pk_column_name> = cte.pk 136 | 137 | When we have no more rows with null in this column we can set not null and drop the default (which is django's default 138 | behavior): 139 | 140 | .. code:: sql 141 | 142 | ALTER TABLE "test" ALTER COLUMN "field" SET NOT NULL; 143 | ALTER TABLE "test" ALTER COLUMN "field" DROP DEFAULT; 144 | 145 | So we finish the add-field process. 146 | It will definitely be more time consuming than the basic variant with one sql statement, but in this approach 147 | there are no long locks on the table so the service can work normally during this migration process. 148 | 149 | Run tests 150 | --------- 151 | 152 | ..
code:: bash 153 | 154 | ./run_tests.sh 155 | -------------------------------------------------------------------------------- /zero_downtime_migrations/backend/schema.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | from __future__ import unicode_literals 4 | 5 | import re 6 | import sys 7 | import inspect 8 | 9 | from packaging.version import Version 10 | 11 | try: 12 | from django.db.backends.postgresql.schema import DatabaseSchemaEditor as BaseEditor 13 | except ImportError: 14 | from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor as BaseEditor 15 | 16 | import django 17 | from django.db.models.fields import NOT_PROVIDED 18 | from django.db.models.fields.related import RelatedField 19 | from django.db import transaction 20 | from django.db.migrations.questioner import InteractiveMigrationQuestioner 21 | 22 | from zero_downtime_migrations.backend.sql_template import ( 23 | SQL_ESTIMATE_COUNT_IN_TABLE, 24 | SQL_CHECK_COLUMN_STATUS, 25 | SQL_COUNT_IN_TABLE, 26 | SQL_COUNT_IN_TABLE_WITH_NULL, 27 | SQL_UPDATE_BATCH, 28 | SQL_CREATE_UNIQUE_INDEX, 29 | SQL_ADD_UNIQUE_CONSTRAINT_FROM_INDEX, 30 | SQL_CHECK_INDEX_STATUS, 31 | ) 32 | 33 | from zero_downtime_migrations.backend.exceptions import InvalidIndexError 34 | 35 | DJANGO_VERISON = Version(django.get_version()) 36 | TABLE_SIZE_FOR_MAX_BATCH = 500000 37 | MAX_BATCH_SIZE = 10000 38 | MIN_BATCH_SIZE = 1000 39 | 40 | _getargspec = getattr(inspect, 'getfullargspec', getattr(inspect, 'getargspec', None)) 41 | 42 | class ZeroDownTimeMixin(object): 43 | RETRY_QUESTION_TEMPLATE = ('It look like column "{}" in table "{}" already exist with following ' 44 | 'parameters: TYPE: "{}", DEFAULT: "{}", NULLABLE: "{}".' 
    )
    # ^ closes RETRY_QUESTION_TEMPLATE, whose opening '(' is on the preceding line.

    # Menu offered when the column being added already exists in the table
    # (i.e. a previous run of this migration crashed part-way through).
    # Choices are matched by 1-based position in get_actions_to_perform().
    RETRY_CHOICES = (
        'abort migration',
        'drop column and run migration from beginning',
        'manually choose action to start from',
        'show how many rows still need to be updated',
        'mark operation as successful and proceed to next operation',
        'drop column and run migration from standard SchemaEditor',
    )

    # Ordered steps of the zero-downtime "add field with default" flow.
    # Each entry maps to a method on this class by replacing spaces with
    # underscores (see the dispatch loop in add_field()).
    ADD_FIELD_WITH_DEFAULT_ACTIONS = [
        'add field with default',
        'update existing rows',
        'set not null for field',
        'drop default',
    ]

    def alter_field(self, model, old_field, new_field, strict=False):
        """Alter a field; on Django >= 2.1 add UNIQUE without a long lock.

        When a unique constraint would be added, first build a unique index
        (forced CONCURRENTLY by execute()) and then attach a constraint that
        reuses that index, instead of letting ALTER TABLE hold an exclusive
        lock while it builds the index. already_added_unique makes
        _unique_should_be_added() report False inside the super() call so the
        constraint is not added twice.
        """
        if DJANGO_VERISON >= Version('2.1'):
            from django.db.backends.ddl_references import IndexName
            if self._unique_should_be_added(old_field, new_field):
                table = model._meta.db_table
                index_name = str(IndexName(table, [new_field.column], '_uniq', self._create_index_name))
                self.execute(
                    self._create_index_sql(model, [new_field], name=index_name, sql=SQL_CREATE_UNIQUE_INDEX)
                )
                self.execute(self._create_unique_constraint_from_index_sql(table, index_name))
                self.already_added_unique = True

        return super(ZeroDownTimeMixin, self).alter_field(model, old_field, new_field, strict=strict)

    def _field_supported(self, field):
        """Return True if ``field`` can use the zero-downtime add path.

        Related fields and fields without a default fall back to the standard
        SchemaEditor; auto_now/auto_now_add date fields on Django >= 1.10 are
        supported via the date_default flag consumed in add_field().
        """
        supported = True
        if isinstance(field, RelatedField):
            supported = False
        elif field.default is NOT_PROVIDED:
            supported = False
            # NOTE(review): the flattened source does not show indentation;
            # nesting this check under the elif follows the surrounding logic
            # (auto_* fields have default NOT_PROVIDED) - confirm upstream.
            if (DJANGO_VERISON >= Version('1.10') and
                    (getattr(field, 'auto_now', False) or
                     getattr(field, 'auto_now_add', False))
                    ):
                self.date_default = True
                supported = True
        return supported

    def add_field(self, model, field):
        """Add ``field`` to ``model`` in several short-lock steps.

        Unsupported fields are delegated to the default SchemaEditor.
        Otherwise the (possibly resumed) action list returned by
        get_actions_to_perform() is executed outside of any atomic block,
        so every step commits on its own and a crash can be resumed.
        """
        if not self._field_supported(field=field):
            return super(ZeroDownTimeMixin, self).add_field(model, field)

        # Checking which actions we should perform - maybe this operation was run
        # before and it had crashed for some reason
        actions = self.get_actions_to_perform(model, field)
        if not actions:
            return

        # Saving initial values
        default_effective_value = self.effective_default(field)
        nullable = field.null
        # Update the values to the required ones: the column is first created
        # nullable and without a default; both are restored later as separate steps.
        field.default = None if DJANGO_VERISON < Version('1.11') else NOT_PROVIDED
        if getattr(self, 'date_default', False):
            # auto_now/auto_now_add are switched off for the DDL phase so an
            # explicit default can be used instead.
            if getattr(field, 'auto_now', False):
                field.auto_now = False
            if getattr(field, 'auto_now_add', False):
                field.auto_now_add = False
        if nullable is False:
            field.null = True

        # For Django < 1.10
        atomic = getattr(self, 'atomic_migration', True)

        # Leave the migration-wide transaction: each step below must commit
        # independently.
        if self.connection.in_atomic_block:
            self.atomic.__exit__(None, None, None)

        available_args = {
            'model': model,
            'field': field,
            'nullable': nullable,
            'default_effective_value': default_effective_value,
        }
        # Performing needed actions: each action name maps to a method, which
        # is called with only the arguments its signature declares.
        for action in actions:
            action = '_'.join(action.split())
            func = getattr(self, action)
            func_args = {arg: available_args[arg] for arg in
                         _getargspec(func).args if arg != 'self'
                         }
            func(**func_args)

        # If migrations was atomic=True initially
        # entering atomic block again
        if atomic:
            self.atomic = transaction.atomic(self.connection.alias)
            self.atomic.__enter__()

    def add_field_with_default(self, model, field, default_effective_value):
        """
        Adding field with default in two separate
        operations, so we can avoid rewriting the
        whole table
        """
        with transaction.atomic():
            super(ZeroDownTimeMixin, self).add_field(model, field)
            self.add_default(model, field, default_effective_value)

    def update_existing_rows(self, model, field, default_effective_value):
        """
        Updating existing rows in table by (relatively) small batches
        to avoid long locks on table
        """
        if default_effective_value is None:
            return
        objects_in_table = self.count_objects_in_table(model=model)
        if objects_in_table > 0:
            objects_in_batch_count = self.get_objects_in_batch_count(objects_in_table)
            # Repeat batched UPDATEs until no row with NULL remains; each
            # batch commits in its own transaction to keep locks short.
            while True:
                with transaction.atomic():
                    updated = self.update_batch(model=model, field=field,
                                                objects_in_batch_count=objects_in_batch_count,
                                                value=default_effective_value,
                                                )
                    print('Update {} rows in {}'.format(updated, model._meta.db_table))
                    # updated is None in collect_sql mode; 0 means done.
                    if updated is None or updated == 0:
                        break

    def set_not_null_for_field(self, model, field, nullable):
        # If field was not null - adding
        # this knowledge to table
        if nullable is False:
            self.set_not_null(model, field)

    def get_column_info(self, model, field):
        # Returns (is_nullable, data_type, column_default) from
        # information_schema, or None when the column does not exist yet.
        sql = SQL_CHECK_COLUMN_STATUS % {
            "table": model._meta.db_table,
            "column": field.name,
        }
        return self.get_query_result(sql)

    def get_actions_to_perform(self, model, field):
        """Return the list of flow steps still to run for this column.

        When the column already exists (previous run crashed) the user is
        asked interactively, via RETRY_CHOICES, how to resume; the returned
        list is a (possibly empty) suffix of ADD_FIELD_WITH_DEFAULT_ACTIONS.
        """
        actions = self.ADD_FIELD_WITH_DEFAULT_ACTIONS
        # Checking maybe this column already exists
        # if so asking user what to do next
        column_info = self.get_column_info(model, field)

        if column_info is not None:
            existed_nullable, existed_type, existed_default = column_info

            questioner = InteractiveMigrationQuestioner()

            question = self.RETRY_QUESTION_TEMPLATE.format(
                field.name, model._meta.db_table,
                existed_type, existed_default,
                existed_nullable,
            )

            # _choice_input returns a 1-based index into RETRY_CHOICES.
            result = questioner._choice_input(question, self.RETRY_CHOICES)
            if result == 1:
                # abort migration
                sys.exit(1)
            elif result == 2:
                # drop column, then run the full action list from scratch
                self.remove_field(model, field)
            elif result == 3:
                # resume from a user-picked step
                question = 'Now choose from which action process should continue'
                result = questioner._choice_input(question, actions)
                actions = actions[result - 1:]
            elif result == 4:
                # informational: show remaining NULL rows, then re-ask
                question = 'Rows in table where column is null: "{}"'
                need_to_update = self.need_to_update(model=model, field=field)
                questioner._choice_input(question.format(need_to_update),
                                         ('Continue',)
                                         )
                return self.get_actions_to_perform(model, field)
            elif result == 5:
                # mark as successful - nothing left to do
                actions = []
            elif result == 6:
                # redo with the standard (locking) SchemaEditor
                self.remove_field(model, field)
                super(ZeroDownTimeMixin, self).add_field(model, field)
                actions = []
        return actions

    def get_pk_column_name(self, model):
        return model._meta.pk.name

    def update_batch(self, model, field, objects_in_batch_count, value):
        # Fill at most `objects_in_batch_count` NULL rows with `value`;
        # returns the affected row count (see get_query_result).
        pk_column_name = self.get_pk_column_name(model)
        sql = SQL_UPDATE_BATCH % {
            "table": model._meta.db_table,
            "column": field.name,
            "batch_size": objects_in_batch_count,
            "pk_column_name": pk_column_name,
            "value": "%s",
        }
        params = [value]
        return self.get_query_result(sql, params, row_count=True)

    def get_objects_in_batch_count(self, model_count):
        """
        Calculate batch size
        :param model_count: int
        :return: int
        """
        # 5% of the table, clamped to [MIN_BATCH_SIZE, MAX_BATCH_SIZE].
        if model_count > TABLE_SIZE_FOR_MAX_BATCH:
            value = MAX_BATCH_SIZE
        else:
            value = int((model_count / 100) * 5)
        return max(MIN_BATCH_SIZE, value)

    def get_query_result(self, sql, params=(), row_count=False):
        """
        Default django backend execute function does not
        return any result so we use this custom where needed
        """
        if self.collect_sql:
            # in collect_sql case use django function logic
            return self.execute(sql, params)

        with self.connection.cursor() as cursor:
            cursor.execute(sql, params)
            if row_count:
                return cursor.rowcount
            return cursor.fetchone()

    def parse_cursor_result(self, cursor_result, place=0, collect_sql_value=1, ):
        # Extract one value from a fetchone() row; in collect_sql mode a
        # sentinel is returned so sqlmigrate output stays meaningful.
        result = None
        if self.collect_sql:
            result = collect_sql_value  # For sqlmigrate purpose
        elif cursor_result:
            result = cursor_result[place]
        return result

    def execute_table_query(self, sql, model):
        # Run a per-table statement (the template only needs %(table)s)
        # and return the first column of the first row.
        sql = sql % {
            "table": model._meta.db_table
        }
        cursor_result = self.get_query_result(sql)
        return self.parse_cursor_result(cursor_result=cursor_result)

    def count_objects_in_table(self, model):
        # Fast estimate from pg_class.reltuples first; fall back to an
        # exact COUNT(*) only when the estimate is useless.
        count = self.execute_table_query(
            sql=SQL_ESTIMATE_COUNT_IN_TABLE,
            model=model,
        )
        if count in [0, -1]:
            # Check, maybe statistic is outdated?
            # Because previous count return 0 or -1 it will be fast query
            count = self.execute_table_query(
                sql=SQL_COUNT_IN_TABLE,
                model=model,
            )
        return count

    def need_to_update(self, model, field):
        # Number of rows still holding NULL in the new column.
        sql = SQL_COUNT_IN_TABLE_WITH_NULL % {
            "table": model._meta.db_table,
            "column": field.name,
        }
        cursor_result = self.get_query_result(sql)
        return self.parse_cursor_result(cursor_result=cursor_result)

    def drop_default(self, model, field):
        # ALTER TABLE ... ALTER COLUMN ... DROP DEFAULT
        set_default_sql, params = self._alter_column_default_sql_local(field, drop=True)
        self.execute_alter_column(model, set_default_sql, params)

    def add_default(self, model, field, default_value):
        # ALTER TABLE ... ALTER COLUMN ... SET DEFAULT %s
        set_default_sql, params = self._alter_column_default_sql_local(field, default_value)
        self.execute_alter_column(model, set_default_sql, params)

    def set_not_null(self, model, field):
        # ALTER TABLE ... ALTER COLUMN ... SET NOT NULL
        set_not_null_sql = self.generate_set_not_null(field)
        self.execute_alter_column(model, set_not_null_sql)

    def execute_alter_column(self, model, changes_sql, params=()):
        # Wrap a column-level change into the backend's ALTER TABLE template.
        sql = self.sql_alter_column % {
            "table": self.quote_name(model._meta.db_table),
            "changes": changes_sql,
        }
        self.execute(sql, params)

    def generate_set_not_null(self, field):
        # Render the backend's SET NOT NULL fragment for this column.
        new_db_params = field.db_parameters(connection=self.connection)
        sql = self.sql_alter_column_not_null
        return sql % {
            'column':
self.quote_name(field.column), 329 | 'type': new_db_params['type'], 330 | } 331 | 332 | def _alter_column_default_sql_local(self, field, default_value=None, drop=False): 333 | """ 334 | Copy this method from django2.0 335 | https://github.com/django/django/blob/master/django/db/backends/base/schema.py#L787 336 | """ 337 | default = '%s' 338 | params = [default_value] 339 | 340 | if drop: 341 | params = [] 342 | 343 | new_db_params = field.db_parameters(connection=self.connection) 344 | sql = self.sql_alter_column_no_default if drop else self.sql_alter_column_default 345 | return ( 346 | sql % { 347 | 'column': self.quote_name(field.column), 348 | 'type': new_db_params['type'], 349 | 'default': default, 350 | }, 351 | params, 352 | ) 353 | 354 | def _unique_should_be_added(self, old_field, new_field): 355 | if getattr(self, 'already_added_unique', False): 356 | return False 357 | return super(ZeroDownTimeMixin, self)._unique_should_be_added(old_field, new_field) 358 | 359 | def _create_unique_constraint_from_index_sql(self, table, index_name): 360 | return SQL_ADD_UNIQUE_CONSTRAINT_FROM_INDEX % { 361 | "table": table, 362 | "name": index_name, 363 | "index_name": index_name, 364 | } 365 | 366 | def _check_index_sql(self, index_name): 367 | return SQL_CHECK_INDEX_STATUS % { 368 | "index_name": index_name, 369 | } 370 | 371 | def _check_valid_index(self, sql): 372 | """ 373 | Return index_name if it's invalid 374 | """ 375 | index_match = re.match(r'.* "(?P.+)" ON .+', sql) 376 | if index_match: 377 | index_name = index_match.group('index_name') 378 | check_index_sql = self._check_index_sql(index_name) 379 | cursor_result = self.get_query_result(check_index_sql) 380 | if self.parse_cursor_result(cursor_result=cursor_result): 381 | return index_name 382 | 383 | def _create_unique_failed(self, exc): 384 | return (DJANGO_VERISON >= Version('2.1') 385 | and 'could not create unique index' in repr(exc) 386 | ) 387 | 388 | def execute(self, sql, params=()): 389 | 
        exit_atomic = False
        # Account for non-string statement objects.
        sql = str(sql)

        # Index DDL is forced to run CONCURRENTLY, which PostgreSQL refuses
        # inside a transaction block - so we must leave any atomic block.
        if re.search('(CREATE|DROP).+INDEX', sql):
            exit_atomic = True
            if 'CONCURRENTLY' not in sql:
                sql = sql.replace('INDEX', 'INDEX CONCURRENTLY')
        atomic = self.connection.in_atomic_block
        if exit_atomic and atomic:
            # Temporarily exit the surrounding atomic block; re-entered at the end.
            self.atomic.__exit__(None, None, None)
        try:
            super(ZeroDownTimeMixin, self).execute(sql, params)
        except django.db.utils.IntegrityError as exc:
            # create unique index should be treated differently
            # because it raises error, instead of quiet exit
            if not self._create_unique_failed(exc):
                raise

        if exit_atomic and not self.collect_sql:
            # A failed CONCURRENTLY build leaves an INVALID index behind;
            # detect it, drop it, and make the migration fail loudly.
            invalid_index_name = self._check_valid_index(sql)
            if invalid_index_name:
                # index was build, but invalid, we need to delete it
                self.execute(self.sql_delete_index % {'name': invalid_index_name})
                # NOTE(review): this literal is concatenated with the next one
                # without a separating space ("...restart.Sql was: ...").
                raise InvalidIndexError(
                    'Unsuccessful attempt to create an index, fix data if needed and restart.'
415 | 'Sql was: {}'.format(sql) 416 | ) 417 | if exit_atomic and atomic: 418 | self.atomic = transaction.atomic(self.connection.alias) 419 | self.atomic.__enter__() 420 | 421 | 422 | class DatabaseSchemaEditor(ZeroDownTimeMixin, BaseEditor): 423 | pass 424 | -------------------------------------------------------------------------------- /tests/test_add_field.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | from __future__ import unicode_literals 4 | 5 | import pytest 6 | import pytz 7 | import django 8 | 9 | from datetime import datetime 10 | from distutils.version import StrictVersion 11 | from django.db import models 12 | from django.db import connections 13 | from django.test.utils import CaptureQueriesContext 14 | from freezegun import freeze_time 15 | 16 | from zero_downtime_migrations.backend.schema import DatabaseSchemaEditor 17 | from test_app.models import TestModel 18 | 19 | pytestmark = pytest.mark.django_db 20 | connection = connections['default'] 21 | schema_editor = DatabaseSchemaEditor 22 | DJANGO_VERISON = StrictVersion(django.get_version()) 23 | 24 | 25 | def column_classes(model): 26 | with connection.cursor() as cursor: 27 | columns = { 28 | d[0]: (connection.introspection.get_field_type(d[1], d), d) 29 | for d in connection.introspection.get_table_description( 30 | cursor, 31 | model._meta.db_table, 32 | ) 33 | } 34 | return columns 35 | 36 | 37 | def test_sqlmigrate_add_field_working(): 38 | field = models.BooleanField(default=True) 39 | field.set_attributes_from_name("bool_field") 40 | with schema_editor(connection=connection, collect_sql=True) as editor: 41 | editor.add_field(TestModel, field) 42 | assert editor.collected_sql == [ 43 | "SELECT IS_NULLABLE, DATA_TYPE, COLUMN_DEFAULT from information_schema.columns where table_name = 'test_app_testmodel' and column_name = 'bool_field';", 44 | 'ALTER TABLE "test_app_testmodel" ADD COLUMN "bool_field" boolean NULL;', 45 | 'ALTER 
TABLE "test_app_testmodel" ALTER COLUMN "bool_field" SET DEFAULT true;', 46 | "SELECT reltuples::BIGINT FROM pg_class WHERE relname = 'test_app_testmodel';", 47 | ("WITH cte AS ( SELECT id as pk FROM test_app_testmodel WHERE bool_field is null LIMIT 1000 )" 48 | " UPDATE test_app_testmodel table_ SET bool_field = true FROM cte WHERE table_.id = cte.pk;"), 49 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN "bool_field" SET NOT NULL;', 50 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN "bool_field" DROP DEFAULT;' 51 | ] 52 | 53 | 54 | def test_add_bool_field_no_existed_objects_success(): 55 | columns = column_classes(TestModel) 56 | assert "bool_field" not in columns 57 | 58 | field = models.BooleanField(default=True) 59 | field.set_attributes_from_name("bool_field") 60 | 61 | with CaptureQueriesContext(connection) as ctx, schema_editor(connection=connection) as editor: 62 | editor.add_field(TestModel, field) 63 | 64 | columns = column_classes(TestModel) 65 | assert columns['bool_field'][0] == "BooleanField" 66 | queries = [query_data['sql'] for query_data in ctx.captured_queries if 'test_app' in query_data['sql']] 67 | expected_queries = [("SELECT IS_NULLABLE, DATA_TYPE, COLUMN_DEFAULT from information_schema.columns where " 68 | "table_name = 'test_app_testmodel' and column_name = 'bool_field';"), 69 | 'ALTER TABLE "test_app_testmodel" ADD COLUMN "bool_field" boolean NULL', 70 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN "bool_field" SET DEFAULT true', 71 | "SELECT reltuples::BIGINT FROM pg_class WHERE relname = 'test_app_testmodel';", 72 | 'SELECT COUNT(*) FROM test_app_testmodel;', 73 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN "bool_field" SET NOT NULL', 74 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN "bool_field" DROP DEFAULT', 75 | ] 76 | assert queries == expected_queries 77 | 78 | 79 | def test_add_bool_field_with_existed_object_success(test_object): 80 | columns = column_classes(TestModel) 81 | assert "bool_field" not in columns 82 | 83 | 
field = models.BooleanField(default=True) 84 | field.set_attributes_from_name("bool_field") 85 | with CaptureQueriesContext(connection) as ctx, schema_editor(connection=connection) as editor: 86 | editor.add_field(TestModel, field) 87 | queries = [query_data['sql'] for query_data in ctx.captured_queries if 88 | 'test_app' in query_data['sql']] 89 | 90 | columns = column_classes(TestModel) 91 | assert columns['bool_field'][0] == "BooleanField" 92 | expected_queries = [("SELECT IS_NULLABLE, DATA_TYPE, COLUMN_DEFAULT from information_schema.columns where " 93 | "table_name = 'test_app_testmodel' and column_name = 'bool_field';"), 94 | 'ALTER TABLE "test_app_testmodel" ADD COLUMN "bool_field" boolean NULL', 95 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN "bool_field" SET DEFAULT true', 96 | "SELECT reltuples::BIGINT FROM pg_class WHERE relname = 'test_app_testmodel';", 97 | 'SELECT COUNT(*) FROM test_app_testmodel;', 98 | ("WITH cte AS ( SELECT id as pk FROM test_app_testmodel WHERE bool_field is null LIMIT 1000 )" 99 | " UPDATE test_app_testmodel table_ SET bool_field = true FROM cte WHERE table_.id = cte.pk"), 100 | ("WITH cte AS ( SELECT id as pk FROM test_app_testmodel WHERE bool_field is null LIMIT 1000 )" 101 | " UPDATE test_app_testmodel table_ SET bool_field = true FROM cte WHERE table_.id = cte.pk"), 102 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN "bool_field" SET NOT NULL', 103 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN "bool_field" DROP DEFAULT', 104 | ] 105 | assert queries == expected_queries 106 | sql = 'SELECT * from "test_app_testmodel" where id = %s' 107 | with connection.cursor() as cursor: 108 | cursor.execute(sql, (test_object.id, )) 109 | result = cursor.fetchall() 110 | assert result == [(test_object.id, test_object.name, True)] 111 | 112 | 113 | def test_add_bool_field_with_existed_many_objects_success(test_object, test_object_two, test_object_three, ): 114 | columns = column_classes(TestModel) 115 | assert "bool_field" not in 
columns 116 | 117 | field = models.BooleanField(default=True) 118 | field.set_attributes_from_name("bool_field") 119 | with CaptureQueriesContext(connection) as ctx, schema_editor(connection=connection) as editor: 120 | editor.add_field(TestModel, field) 121 | queries = [query_data['sql'] for query_data in ctx.captured_queries if 122 | 'test_app' in query_data['sql']] 123 | 124 | columns = column_classes(TestModel) 125 | assert columns['bool_field'][0] == "BooleanField" 126 | expected_queries = [("SELECT IS_NULLABLE, DATA_TYPE, COLUMN_DEFAULT from information_schema.columns where " 127 | "table_name = 'test_app_testmodel' and column_name = 'bool_field';"), 128 | 'ALTER TABLE "test_app_testmodel" ADD COLUMN "bool_field" boolean NULL', 129 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN "bool_field" SET DEFAULT true', 130 | "SELECT reltuples::BIGINT FROM pg_class WHERE relname = 'test_app_testmodel';", 131 | 'SELECT COUNT(*) FROM test_app_testmodel;', 132 | ("WITH cte AS ( SELECT id as pk FROM test_app_testmodel WHERE bool_field is null LIMIT 1000 )" 133 | " UPDATE test_app_testmodel table_ SET bool_field = true FROM cte WHERE table_.id = cte.pk"), 134 | ("WITH cte AS ( SELECT id as pk FROM test_app_testmodel WHERE bool_field is null LIMIT 1000 )" 135 | " UPDATE test_app_testmodel table_ SET bool_field = true FROM cte WHERE table_.id = cte.pk"), 136 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN "bool_field" SET NOT NULL', 137 | 'ALTER TABLE "test_app_testmodel" ALTER COLUMN "bool_field" DROP DEFAULT', 138 | ] 139 | assert queries == expected_queries 140 | sql = 'SELECT * from "test_app_testmodel" where id = ANY(%s) ORDER BY id' 141 | with connection.cursor() as cursor: 142 | cursor.execute(sql, ([test_object.id, test_object_two.id, test_object_three.id], )) 143 | result = cursor.fetchall() 144 | assert result == [(test_object.id, test_object.name, True), 145 | (test_object_two.id, test_object_two.name, True), 146 | (test_object_three.id, test_object_three.name, 
                      True),
                      ]


@freeze_time("2017-12-15 03:21:34", tz_offset=-3)
def test_add_datetime_field_no_existed_objects_success():
    """Adding DateTimeField(default=datetime.now) to an EMPTY table.

    With no existing rows, the zero-downtime editor must skip the batched
    backfill UPDATEs entirely: nullable ADD COLUMN -> SET DEFAULT ->
    row-count probes -> SET NOT NULL -> DROP DEFAULT.
    """
    columns = column_classes(TestModel)
    assert "datetime_field" not in columns

    field = models.DateTimeField(default=datetime.now)
    field.set_attributes_from_name("datetime_field")

    with CaptureQueriesContext(connection) as ctx, schema_editor(connection=connection) as editor:
        editor.add_field(TestModel, field)
    # Keep only the SQL that touches our test table; ignore bookkeeping queries.
    queries = [query_data['sql'] for query_data in ctx.captured_queries if
               'test_app' in query_data['sql']]

    columns = column_classes(TestModel)
    assert columns['datetime_field'][0] == "DateTimeField"
    # Frozen time is 03:21:34 UTC with tz_offset=-3, so the naive local "now"
    # rendered into the DEFAULT clause is 00:21:34.
    expected_queries = [("SELECT IS_NULLABLE, DATA_TYPE, COLUMN_DEFAULT from information_schema.columns where table_name "
                         "= 'test_app_testmodel' and column_name = 'datetime_field';"),
                        'ALTER TABLE "test_app_testmodel" ADD COLUMN "datetime_field" timestamp with time zone NULL',
                        'ALTER TABLE "test_app_testmodel" ALTER COLUMN "datetime_field" SET DEFAULT \'2017-12-15T00:21:34+00:00\'::timestamptz',
                        "SELECT reltuples::BIGINT FROM pg_class WHERE relname = 'test_app_testmodel';",
                        'SELECT COUNT(*) FROM test_app_testmodel;',
                        'ALTER TABLE "test_app_testmodel" ALTER COLUMN "datetime_field" SET NOT NULL',
                        'ALTER TABLE "test_app_testmodel" ALTER COLUMN "datetime_field" DROP DEFAULT',
                        ]
    assert queries == expected_queries


@freeze_time("2017-12-15 03:21:34", tz_offset=-3)
def test_add_datetime_field_with_existed_object_success(test_object):
    """Adding DateTimeField(default=datetime.now) with ONE existing row.

    Existing rows must be backfilled via the batched CTE UPDATE (LIMIT 1000);
    a second CTE pass confirms nothing is left NULL before SET NOT NULL.
    """
    columns = column_classes(TestModel)
    assert "datetime_field" not in columns

    field = models.DateTimeField(default=datetime.now)
    field.set_attributes_from_name("datetime_field")
    with CaptureQueriesContext(connection) as ctx, schema_editor(connection=connection) as editor:
        editor.add_field(TestModel, field)
    # Keep only the SQL that touches our test table.
    queries = [query_data['sql'] for query_data in ctx.captured_queries if
               'test_app' in query_data['sql']]

    columns = column_classes(TestModel)
    assert columns['datetime_field'][0] == "DateTimeField"
    expected_queries = [("SELECT IS_NULLABLE, DATA_TYPE, COLUMN_DEFAULT from information_schema.columns where "
                         "table_name = 'test_app_testmodel' and column_name = 'datetime_field';"),
                        'ALTER TABLE "test_app_testmodel" ADD COLUMN "datetime_field" timestamp with time zone NULL',
                        'ALTER TABLE "test_app_testmodel" ALTER COLUMN "datetime_field" SET DEFAULT \'2017-12-15T00:21:34+00:00\'::timestamptz',
                        "SELECT reltuples::BIGINT FROM pg_class WHERE relname = 'test_app_testmodel';",
                        'SELECT COUNT(*) FROM test_app_testmodel;',
                        # First batch backfills the row; the second (empty) batch
                        # proves the loop terminates when no NULLs remain.
                        ("WITH cte AS ( SELECT id as pk FROM test_app_testmodel WHERE datetime_field is null LIMIT 1000 ) "
                         "UPDATE test_app_testmodel table_ SET datetime_field = \'2017-12-15T00:21:34+00:00\'::timestamptz FROM cte WHERE table_.id = cte.pk"),
                        ("WITH cte AS ( SELECT id as pk FROM test_app_testmodel WHERE datetime_field is null LIMIT 1000"
                         " ) UPDATE test_app_testmodel table_ SET datetime_field = \'2017-12-15T00:21:34+00:00\'::timestamptz FROM cte WHERE table_.id = cte.pk"),
                        'ALTER TABLE "test_app_testmodel" ALTER COLUMN "datetime_field" SET NOT NULL',
                        'ALTER TABLE "test_app_testmodel" ALTER COLUMN "datetime_field" DROP DEFAULT',
                        ]
    assert queries == expected_queries
    # Verify the row was actually backfilled at the SQL level (UTC in the DB).
    sql = 'SELECT * from "test_app_testmodel" where id = %s'
    with connection.cursor() as cursor:
        cursor.execute(sql, (test_object.id, ))
        result = cursor.fetchall()
    assert result == [(test_object.id, test_object.name, datetime(2017, 12, 15, 0, 21, 34, tzinfo=pytz.UTC))]


@freeze_time("2017-12-15 03:21:34", tz_offset=-3)
def test_add_datetime_field_with_auto_now_add_existed_object_success(test_object):
    """DateTimeField(auto_now_add=True, null=True) with one existing row.

    The column stays nullable, so there is no SET NOT NULL step; existing
    rows are still backfilled.  auto_now_add uses timezone-aware now, hence
    03:21:34 UTC (unlike the naive 00:21:34 of default=datetime.now).
    """
    columns = column_classes(TestModel)
    assert "datetime_field" not in columns

    field = models.DateTimeField(auto_now_add=True, null=True)
    field.set_attributes_from_name("datetime_field")
    with CaptureQueriesContext(connection) as ctx, schema_editor(connection=connection) as editor:
        editor.add_field(TestModel, field)
    queries = [query_data['sql'] for query_data in ctx.captured_queries if
               'test_app' in query_data['sql']]

    columns = column_classes(TestModel)
    assert columns['datetime_field'][0] == "DateTimeField"
    # NOTE(review): DJANGO_VERISON (sic) is a module-level constant defined
    # above this chunk; auto_now_add query shape is only pinned on 1.10+.
    if DJANGO_VERISON >= StrictVersion('1.10'):
        expected_queries = [("SELECT IS_NULLABLE, DATA_TYPE, COLUMN_DEFAULT from information_schema.columns where "
                             "table_name = 'test_app_testmodel' and column_name = 'datetime_field';"),
                            'ALTER TABLE "test_app_testmodel" ADD COLUMN "datetime_field" timestamp with time zone NULL',
                            'ALTER TABLE "test_app_testmodel" ALTER COLUMN "datetime_field" SET DEFAULT \'2017-12-15T03:21:34+00:00\'::timestamptz',
                            "SELECT reltuples::BIGINT FROM pg_class WHERE relname = 'test_app_testmodel';",
                            'SELECT COUNT(*) FROM test_app_testmodel;',
                            ("WITH cte AS ( SELECT id as pk FROM test_app_testmodel WHERE datetime_field is null LIMIT 1000 ) "
                             "UPDATE test_app_testmodel table_ SET datetime_field = \'2017-12-15T03:21:34+00:00\'::timestamptz FROM cte WHERE table_.id = cte.pk"),
                            ("WITH cte AS ( SELECT id as pk FROM test_app_testmodel WHERE datetime_field is null LIMIT 1000"
                             " ) UPDATE test_app_testmodel table_ SET datetime_field = \'2017-12-15T03:21:34+00:00\'::timestamptz FROM cte WHERE table_.id = cte.pk"),
                            'ALTER TABLE "test_app_testmodel" ALTER COLUMN "datetime_field" DROP DEFAULT',
                            ]
        # NOTE(review): source dump loses indentation here — this assert is
        # placed inside the version guard because expected_queries is only
        # defined there; confirm against upstream history.
        assert queries == expected_queries
    sql = 'SELECT * from "test_app_testmodel" where id = %s'
    with connection.cursor() as cursor:
        cursor.execute(sql, (test_object.id, ))
        result = cursor.fetchall()
    assert result == [(test_object.id, test_object.name, datetime(2017, 12, 15, 3, 21, 34, tzinfo=pytz.UTC))]


@freeze_time("2017-12-15 03:21:34", tz_offset=-3)
def test_add_datetime_field_with_auto_now_existed_object_success(test_object):
    """DateTimeField(auto_now=True, null=True) with one existing row.

    Same expected query sequence as the auto_now_add case: nullable column
    (no SET NOT NULL), backfill with the frozen aware "now" (03:21:34 UTC).
    """
    columns = column_classes(TestModel)
    assert "datetime_field" not in columns

    field = models.DateTimeField(auto_now=True, null=True)
    field.set_attributes_from_name("datetime_field")
    with CaptureQueriesContext(connection) as ctx, schema_editor(connection=connection) as editor:
        editor.add_field(TestModel, field)
    queries = [query_data['sql'] for query_data in ctx.captured_queries if
               'test_app' in query_data['sql']]

    columns = column_classes(TestModel)
    assert columns['datetime_field'][0] == "DateTimeField"
    # NOTE(review): query shape only pinned on Django 1.10+ (see constant
    # DJANGO_VERISON defined above this chunk).
    if DJANGO_VERISON >= StrictVersion('1.10'):
        expected_queries = [("SELECT IS_NULLABLE, DATA_TYPE, COLUMN_DEFAULT from information_schema.columns where "
                             "table_name = 'test_app_testmodel' and column_name = 'datetime_field';"),
                            'ALTER TABLE "test_app_testmodel" ADD COLUMN "datetime_field" timestamp with time zone NULL',
                            'ALTER TABLE "test_app_testmodel" ALTER COLUMN "datetime_field" SET DEFAULT \'2017-12-15T03:21:34+00:00\'::timestamptz',
                            "SELECT reltuples::BIGINT FROM pg_class WHERE relname = 'test_app_testmodel';",
                            'SELECT COUNT(*) FROM test_app_testmodel;',
                            ("WITH cte AS ( SELECT id as pk FROM test_app_testmodel WHERE datetime_field is null LIMIT 1000 ) "
                             "UPDATE test_app_testmodel table_ SET datetime_field = \'2017-12-15T03:21:34+00:00\'::timestamptz FROM cte WHERE table_.id = cte.pk"),
                            ("WITH cte AS ( SELECT id as pk FROM test_app_testmodel WHERE datetime_field is null LIMIT 1000"
                             " ) UPDATE test_app_testmodel table_ SET datetime_field = \'2017-12-15T03:21:34+00:00\'::timestamptz FROM cte WHERE table_.id = cte.pk"),
                            'ALTER TABLE "test_app_testmodel" ALTER COLUMN "datetime_field" DROP DEFAULT',
                            ]
        # NOTE(review): indentation reconstructed (see auto_now_add test above
        # for rationale); assert kept inside the version guard.
        assert queries == expected_queries
    sql = 'SELECT * from "test_app_testmodel" where id = %s'
    with connection.cursor() as cursor:
        cursor.execute(sql, (test_object.id, ))
        result = cursor.fetchall()
    assert result == [(test_object.id, test_object.name, datetime(2017, 12, 15, 3, 21, 34, tzinfo=pytz.UTC))]


@freeze_time("2017-12-15 03:21:34", tz_offset=-3)
def test_add_datetime_field_with_existed_many_objects_success(test_object, test_object_two, test_object_three, ):
    """Adding DateTimeField(default=datetime.now) with THREE existing rows.

    All three rows fit in one LIMIT-1000 batch, so the query sequence matches
    the single-row case; afterwards every row must carry the frozen value.
    """
    columns = column_classes(TestModel)
    assert "datetime_field" not in columns

    field = models.DateTimeField(default=datetime.now)
    field.set_attributes_from_name("datetime_field")
    with CaptureQueriesContext(connection) as ctx, schema_editor(connection=connection) as editor:
        editor.add_field(TestModel, field)
    queries = [query_data['sql'] for query_data in ctx.captured_queries if
               'test_app' in query_data['sql']]

    columns = column_classes(TestModel)
    assert columns['datetime_field'][0] == "DateTimeField"
    expected_queries = [("SELECT IS_NULLABLE, DATA_TYPE, COLUMN_DEFAULT from information_schema.columns where "
                         "table_name = 'test_app_testmodel' and column_name = 'datetime_field';"),
                        'ALTER TABLE "test_app_testmodel" ADD COLUMN "datetime_field" timestamp with time zone NULL',
                        'ALTER TABLE "test_app_testmodel" ALTER COLUMN "datetime_field" SET DEFAULT \'2017-12-15T00:21:34+00:00\'::timestamptz',
                        "SELECT reltuples::BIGINT FROM pg_class WHERE relname = 'test_app_testmodel';",
                        'SELECT COUNT(*) FROM test_app_testmodel;',
                        ("WITH cte AS ( SELECT id as pk FROM test_app_testmodel WHERE datetime_field is null LIMIT 1000 ) "
                         "UPDATE test_app_testmodel table_ SET datetime_field = \'2017-12-15T00:21:34+00:00\'::timestamptz FROM cte WHERE table_.id = cte.pk"),
                        ("WITH cte AS ( SELECT id as pk FROM test_app_testmodel WHERE datetime_field is null LIMIT 1000 ) UPDATE test_app_testmodel table_ "
                         "SET datetime_field = \'2017-12-15T00:21:34+00:00\'::timestamptz FROM cte WHERE table_.id = cte.pk"),
                        'ALTER TABLE "test_app_testmodel" ALTER COLUMN "datetime_field" SET NOT NULL',
                        'ALTER TABLE "test_app_testmodel" ALTER COLUMN "datetime_field" DROP DEFAULT',
                        ]
    assert queries == expected_queries
    # Fetch all three rows in id order and check every one was backfilled.
    sql = 'SELECT * from "test_app_testmodel" where id = ANY(%s) ORDER BY id'
    with connection.cursor() as cursor:
        cursor.execute(sql, ([test_object.id, test_object_two.id, test_object_three.id], ))
        result = cursor.fetchall()
    assert result == [(test_object.id, test_object.name, datetime(2017, 12, 15, 0, 21, 34, tzinfo=pytz.UTC)),
                      (test_object_two.id, test_object_two.name, datetime(2017, 12, 15, 0, 21, 34, tzinfo=pytz.UTC)),
                      (test_object_three.id, test_object_three.name, datetime(2017, 12, 15, 0, 21, 34, tzinfo=pytz.UTC)),
                      ]