├── sql_server
│   ├── __init__.py
│   └── pyodbc
│       ├── management
│       │   ├── __init__.py
│       │   └── commands
│       │       ├── __init__.py
│       │       └── install_regex_clr.py
│       ├── __init__.py
│       ├── regex_clr.dll
│       ├── client.py
│       ├── features.py
│       ├── creation.py
│       ├── functions.py
│       ├── introspection.py
│       ├── operations.py
│       ├── compiler.py
│       ├── base.py
│       └── schema.py
├── testapp
│   ├── __init__.py
│   ├── tests
│   │   ├── __init__.py
│   │   ├── test_fields.py
│   │   ├── test_expressions.py
│   │   └── test_constraints.py
│   ├── migrations
│   │   ├── __init__.py
│   │   ├── 0001_initial.py
│   │   ├── 0002_test_unique_nullable_part1.py
│   │   ├── 0003_test_unique_nullable_part2.py
│   │   ├── 0004_test_issue45_unique_type_change_part1.py
│   │   ├── 0005_test_issue45_unique_type_change_part2.py
│   │   ├── 0006_test_remove_onetoone_field_part1.py
│   │   └── 0007_test_remove_onetoone_field_part2.py
│   ├── runner.py
│   ├── models.py
│   └── settings.py
├── MANIFEST.in
├── setup.cfg
├── .gitignore
├── manage.py
├── .editorconfig
├── tox.ini
├── docker
│   ├── docker-compose.yml
│   └── Dockerfile
├── setup.py
├── LICENSE
├── test.sh
├── .travis.yml
├── .github
│   └── workflows
│       └── main.yml
└── README.rst
/sql_server/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/testapp/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/testapp/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/testapp/migrations/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/sql_server/pyodbc/management/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/sql_server/pyodbc/management/commands/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/sql_server/pyodbc/__init__.py:
--------------------------------------------------------------------------------
1 | import sql_server.pyodbc.functions # noqa
2 |
--------------------------------------------------------------------------------
/sql_server/pyodbc/regex_clr.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ESSolutions/django-mssql-backend/HEAD/sql_server/pyodbc/regex_clr.dll
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
2 | include MANIFEST.in
3 | include README.rst
4 | recursive-include sql_server *.py
5 | recursive-exclude docker *
6 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | exclude = .git,__pycache__,migrations
3 | # W504 is mutually exclusive with W503
4 | ignore = W504
5 | max-line-length = 119
6 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.py[co]
2 | *.sw[a-z]
3 | *.orig
4 | *~
5 | .DS_Store
6 | Thumbs.db
7 |
8 | *.egg-info
9 |
10 | tests/local_settings.py
11 |
12 | # Virtual Env
13 | /venv/
14 | .idea/
15 |
--------------------------------------------------------------------------------
/testapp/tests/test_fields.py:
--------------------------------------------------------------------------------
1 | from django.test import TestCase
2 |
3 | from ..models import UUIDModel
4 |
5 |
6 | class TestUUIDField(TestCase):
7 | def test_create(self):
8 | UUIDModel.objects.create()
9 |
--------------------------------------------------------------------------------
/manage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import os
3 | import sys
4 |
5 | if __name__ == "__main__":
6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testapp.settings")
7 |
8 | from django.core.management import execute_from_command_line
9 |
10 | execute_from_command_line(sys.argv)
11 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | # https://editorconfig.org/
2 |
3 | root = true
4 |
5 | [*]
6 | indent_style = space
7 | indent_size = 4
8 | insert_final_newline = true
9 | trim_trailing_whitespace = true
10 | end_of_line = lf
11 | charset = utf-8
12 | max_line_length = 119
13 |
14 | [*.{yml,yaml}]
15 | indent_size = 2
16 |
--------------------------------------------------------------------------------
/testapp/migrations/0007_test_remove_onetoone_field_part2.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.0.4 on 2020-04-20 14:59
2 |
3 | from django.db import migrations
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ('testapp', '0006_test_remove_onetoone_field_part1'),
10 | ]
11 |
12 | operations = [
13 | migrations.RemoveField(
14 | model_name='testremoveonetoonefieldmodel',
15 | name='b',
16 | ),
17 | ]
18 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist =
3 | {py36,py37}-django22,
4 | {py36,py37,py38}-django30,
5 | {py36,py37,py38}-django31,
6 |
7 | [testenv]
8 | passenv =
9 | DATABASE_URL
10 | DATABASE_URL_OTHER
11 |
12 | whitelist_externals =
13 | /bin/bash
14 |
15 | commands =
16 | python manage.py test --keepdb
17 | python manage.py install_regex_clr test_default
18 | bash test.sh
19 |
20 | deps =
21 | django22: django==2.2.*
22 | django30: django>=3.0a1,<3.1
23 | django31: django>=3.1,<3.2
24 | dj-database-url==0.5.0
25 |
--------------------------------------------------------------------------------
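The envlist crosses interpreter versions with Django versions; any one environment can be run on its own while developing, assuming tox is installed and DATABASE_URL / DATABASE_URL_OTHER point at a reachable SQL Server instance:

    tox -e py37-django30
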
/testapp/migrations/0003_test_unique_nullable_part2.py:
--------------------------------------------------------------------------------
1 | from django.db import migrations, models
2 |
3 |
4 | class Migration(migrations.Migration):
5 |
6 | dependencies = [
7 | ('testapp', '0002_test_unique_nullable_part1'),
8 | ]
9 |
10 | operations = [
11 | # Issue #38 test
12 | # Now remove the null=True to check this transition is correctly handled.
13 | migrations.AlterField(
14 | model_name='testuniquenullablemodel',
15 | name='test_field',
16 | field=models.CharField(default='', max_length=100, unique=True),
17 | preserve_default=False,
18 | ),
19 | ]
20 |
--------------------------------------------------------------------------------
/docker/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.3"
2 |
3 | services:
4 | app:
5 | build:
6 | context: ..
7 | dockerfile: docker/Dockerfile
8 | volumes:
9 | - ..:/code
10 | environment:
11 | DATABASE_URL: "mssql://SA:MyPassword42@db:1433/default?isolation_level=read committed&driver=ODBC Driver 17 for SQL Server"
12 | DATABASE_URL_OTHER: "mssql://SA:MyPassword42@db:1433/other?isolation_level=read committed&driver=ODBC Driver 17 for SQL Server"
13 | depends_on:
14 | - db
15 |
16 | db:
17 | image: "mcr.microsoft.com/mssql/server:2017-latest-ubuntu"
18 | environment:
19 | ACCEPT_EULA: Y
20 | SA_PASSWORD: MyPassword42
21 | ports:
22 | - 1433:1433
23 |
--------------------------------------------------------------------------------
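The mssql:// URLs above are parsed by dj-database-url (pinned in tox.ini) inside testapp/settings.py, with the query string ending up in the OPTIONS dictionary. A minimal sketch of that parsing, assuming dj-database-url's mssql scheme maps to this backend's engine path:

    import dj_database_url

    config = dj_database_url.parse(
        'mssql://SA:MyPassword42@db:1433/default'
        '?isolation_level=read committed&driver=ODBC Driver 17 for SQL Server'
    )
    # Expected: config['ENGINE'] == 'sql_server.pyodbc', with 'isolation_level'
    # and 'driver' landing in config['OPTIONS']
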
/docker/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.7
2 |
3 | ENV APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn
4 |
5 | RUN apt-get update && apt-get install -y apt-transport-https git
6 |
7 | RUN curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add -
8 | RUN curl https://packages.microsoft.com/config/debian/9/prod.list > /etc/apt/sources.list.d/mssql-release.list
9 | RUN apt-get update
10 | RUN ACCEPT_EULA=Y apt-get install -y msodbcsql17 g++ unixodbc-dev
11 |
12 | ADD . /code
13 | WORKDIR /code
14 | RUN pip install -e .["tests"]
15 |
16 | ENV PYTHONPATH=$PYTHONPATH:/code
17 |
18 | RUN git clone --branch=stable/2.2.x https://github.com/django/django.git "/code/django" --depth=1
19 |
20 | RUN pip install -r /code/django/tests/requirements/py3.txt
21 |
--------------------------------------------------------------------------------
/testapp/migrations/0002_test_unique_nullable_part1.py:
--------------------------------------------------------------------------------
1 | from django.db import migrations, models
2 |
3 |
4 | class Migration(migrations.Migration):
5 |
6 | dependencies = [
7 | ('testapp', '0001_initial'),
8 | ]
9 |
10 | operations = [
11 | # Issue #38 test prep
12 | # Create with a field that is unique *and* nullable so it is implemented with a filtered unique index.
13 | migrations.CreateModel(
14 | name='TestUniqueNullableModel',
15 | fields=[
16 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
17 | ('test_field', models.CharField(max_length=100, null=True, unique=True)),
18 | ],
19 | ),
20 | ]
21 |
--------------------------------------------------------------------------------
/testapp/runner.py:
--------------------------------------------------------------------------------
1 | from unittest import skip
2 | from django.test.runner import DiscoverRunner
3 | from django.conf import settings
4 |
5 |
6 | EXCLUDED_TESTS = getattr(settings, 'EXCLUDED_TESTS', [])
7 |
8 |
9 | class ExcludeTestSuiteRunner(DiscoverRunner):
10 | def build_suite(self, *args, **kwargs):
11 | suite = super().build_suite(*args, **kwargs)
12 | for case in suite:
13 | cls = case.__class__
14 | for attr in dir(cls):
15 | if not attr.startswith('test_'):
16 | continue
17 | fullname = f'{cls.__module__}.{cls.__name__}.{attr}'
18 | if any(map(fullname.startswith, EXCLUDED_TESTS)):
19 | setattr(cls, attr, skip('Does not work on MSSQL')(getattr(cls, attr)))
20 |
21 | return suite
22 |
--------------------------------------------------------------------------------
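The exclusion above is prefix-based: a test is skipped when its full dotted path starts with any entry in EXCLUDED_TESTS, so an entry can name a single test method, a whole test class, or an entire module. A minimal sketch (the first prefix appears in testapp/settings.py below; the second is hypothetical):

    EXCLUDED_TESTS = [
        'aggregation.tests.AggregateTestCase.test_count_star',  # one test method
        'expressions_window.tests',                             # a whole module
    ]
    # The first entry matches exactly one test; every test whose dotted path
    # begins with 'expressions_window.tests' matches the second, and each match
    # is wrapped with unittest.skip('Does not work on MSSQL').
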
/testapp/migrations/0006_test_remove_onetoone_field_part1.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.0.4 on 2020-04-20 14:59
2 |
3 | from django.db import migrations, models
4 | import django.db.models.deletion
5 |
6 |
7 | class Migration(migrations.Migration):
8 |
9 | dependencies = [
10 | ('testapp', '0005_test_issue45_unique_type_change_part2'),
11 | ]
12 |
13 | operations = [
14 | migrations.CreateModel(
15 | name='TestRemoveOneToOneFieldModel',
16 | fields=[
17 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
18 | ('a', models.CharField(max_length=50)),
19 | ('b', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to='testapp.TestRemoveOneToOneFieldModel')),
20 | ],
21 | ),
22 | ]
23 |
--------------------------------------------------------------------------------
/sql_server/pyodbc/management/commands/install_regex_clr.py:
--------------------------------------------------------------------------------
1 | # Add regex support in SQLServer
2 | # Code taken from django-mssql (see https://bitbucket.org/Manfre/django-mssql)
3 |
4 | from django.core.management.base import BaseCommand
5 | from django.db import connection
6 |
7 |
8 | class Command(BaseCommand):
9 | help = "Installs the regex_clr.dll assembly into the given database"
10 |
11 | requires_model_validation = False
12 |
13 | args = 'database_name'
14 |
15 | def add_arguments(self, parser):
16 | parser.add_argument('database_name')
17 |
18 | def handle(self, *args, **options):
19 | database_name = options['database_name']
20 | if not database_name:
21 | self.print_help('manage.py', 'install_regex_clr')
22 | return
23 |
24 | connection.creation.install_regex_clr(database_name)
25 | print('Installed regex_clr to database %s' % database_name)
26 |
--------------------------------------------------------------------------------
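The command takes the target database name as its only positional argument; tox.ini above, for example, runs it against the test database after the suite:

    python manage.py install_regex_clr test_default
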
/testapp/migrations/0004_test_issue45_unique_type_change_part1.py:
--------------------------------------------------------------------------------
1 | from django.db import migrations, models
2 |
3 |
4 | class Migration(migrations.Migration):
5 |
6 | dependencies = [
7 | ('testapp', '0003_test_unique_nullable_part2'),
8 | ]
9 |
10 | # Issue #45 test prep
11 | operations = [
12 | # for case 1:
13 | migrations.AddField(
14 | model_name='testuniquenullablemodel',
15 | name='x',
16 | field=models.CharField(max_length=10, null=True, unique=True),
17 | ),
18 |
19 | # for case 2:
20 | migrations.CreateModel(
21 | name='TestNullableUniqueTogetherModel',
22 | fields=[
23 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
24 | ('a', models.CharField(max_length=50, null=True)),
25 | ('b', models.CharField(max_length=50)),
26 | ('c', models.CharField(max_length=50)),
27 | ],
28 | options={
29 | 'unique_together': {('a', 'b')},
30 | },
31 | ),
32 | ]
33 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import find_packages, setup
2 |
3 | CLASSIFIERS = [
4 | 'License :: OSI Approved :: BSD License',
5 | 'Framework :: Django',
6 | "Operating System :: POSIX :: Linux",
7 | "Operating System :: Microsoft :: Windows",
8 | 'Programming Language :: Python',
9 | 'Programming Language :: Python :: 3',
10 | 'Programming Language :: Python :: 3.5',
11 | 'Programming Language :: Python :: 3.6',
12 | 'Programming Language :: Python :: 3.7',
13 | 'Framework :: Django :: 2.2',
14 | 'Framework :: Django :: 3.0',
15 | ]
16 |
17 | setup(
18 | name='django-mssql-backend',
19 | version='2.8.1',
20 | description='Django backend for Microsoft SQL Server',
21 | long_description=open('README.rst').read(),
22 | author='ES Solutions AB',
23 | author_email='info@essolutions.se',
24 | url='https://github.com/ESSolutions/django-mssql-backend',
25 | license='BSD',
26 | packages=find_packages(),
27 | install_requires=[
28 | 'pyodbc>=3.0',
29 | ],
30 | package_data={'sql_server.pyodbc': ['regex_clr.dll']},
31 | classifiers=CLASSIFIERS,
32 | keywords='django',
33 | )
34 |
--------------------------------------------------------------------------------
/testapp/migrations/0005_test_issue45_unique_type_change_part2.py:
--------------------------------------------------------------------------------
1 | from django.db import migrations, models
2 |
3 |
4 | class Migration(migrations.Migration):
5 |
6 | dependencies = [
7 | ('testapp', '0004_test_issue45_unique_type_change_part1'),
8 | ]
9 |
10 | # Issue #45 test
11 | operations = [
12 | # Case 1: changing max_length changes the column type - the filtered UNIQUE INDEX which implements
13 | # the nullable unique constraint, should be correctly reinstated after this change of column type
14 | # (see also the specific unit test which checks that multiple rows with NULL are allowed)
15 | migrations.AlterField(
16 | model_name='testuniquenullablemodel',
17 | name='x',
18 | field=models.CharField(max_length=11, null=True, unique=True),
19 | ),
20 |
21 | # Case 2: the filtered UNIQUE INDEX implementing the partially nullable `unique_together` constraint
22 | # should be correctly reinstated after this column type change
23 | migrations.AlterField(
24 | model_name='testnullableuniquetogethermodel',
25 | name='a',
26 | field=models.CharField(max_length=51, null=True),
27 | ),
28 | # ...similarly adding another field to the `unique_together` should preserve the constraint correctly
29 | migrations.AlterUniqueTogether(
30 | name='testnullableuniquetogethermodel',
31 | unique_together={('a', 'b', 'c')},
32 | ),
33 | ]
34 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2019, ES Solutions AB
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | * Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | * Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | * Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/sql_server/pyodbc/client.py:
--------------------------------------------------------------------------------
1 | import re
2 | import subprocess
3 |
4 | from django.db.backends.base.client import BaseDatabaseClient
5 |
6 |
7 | class DatabaseClient(BaseDatabaseClient):
8 | executable_name = 'sqlcmd'
9 |
10 | def runshell(self):
11 | settings_dict = self.connection.settings_dict
12 | options = settings_dict['OPTIONS']
13 | user = options.get('user', settings_dict['USER'])
14 | password = options.get('passwd', settings_dict['PASSWORD'])
15 |
16 | driver = options.get('driver', 'ODBC Driver 13 for SQL Server')
17 | ms_drivers = re.compile('^ODBC Driver .* for SQL Server$|^SQL Server Native Client')
18 | if not ms_drivers.match(driver):
19 | self.executable_name = 'isql'
20 |
21 | if self.executable_name == 'sqlcmd':
22 | db = options.get('db', settings_dict['NAME'])
23 | server = options.get('host', settings_dict['HOST'])
24 | port = options.get('port', settings_dict['PORT'])
25 | defaults_file = options.get('read_default_file')
26 |
27 | args = [self.executable_name]
28 | if server:
29 | if port:
30 | server = ','.join((server, str(port)))
31 | args += ["-S", server]
32 | if user:
33 | args += ["-U", user]
34 | if password:
35 | args += ["-P", password]
36 | else:
37 | args += ["-E"] # Try trusted connection instead
38 | if db:
39 | args += ["-d", db]
40 | if defaults_file:
41 | args += ["-i", defaults_file]
42 | else:
43 | dsn = options.get('dsn', '')
44 | args = [self.executable_name, '-v', dsn, user, password]
45 |
46 | try:
47 | subprocess.check_call(args)
48 | except KeyboardInterrupt:
49 | pass
50 |
--------------------------------------------------------------------------------
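For the Microsoft drivers, runshell() execs sqlcmd with arguments assembled from the settings dictionary. With hypothetical settings HOST='localhost', PORT='1433', USER='sa', PASSWORD='secret' and NAME='mydb', the resulting invocation would be:

    sqlcmd -S localhost,1433 -U sa -P secret -d mydb

When no user is configured, -E is passed instead of -U/-P so sqlcmd attempts a trusted (integrated security) connection; non-Microsoft drivers fall back to isql with a DSN.
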
/testapp/tests/test_expressions.py:
--------------------------------------------------------------------------------
1 | from unittest import skipUnless
2 |
3 | from django import VERSION
4 | from django.db.models import IntegerField
5 | from django.db.models.expressions import Case, Exists, OuterRef, Subquery, Value, When
6 | from django.test import TestCase
7 |
8 | from ..models import Author, Comment, Post
9 |
10 | DJANGO3 = VERSION[0] >= 3
11 |
12 |
13 | class TestSubquery(TestCase):
14 | def setUp(self):
15 | self.author = Author.objects.create(name="author")
16 | self.post = Post.objects.create(title="foo", author=self.author)
17 |
18 | def test_with_count(self):
19 | newest = Comment.objects.filter(post=OuterRef('pk')).order_by('-created_at')
20 | Post.objects.annotate(
21 | post_exists=Subquery(newest.values('text')[:1])
22 | ).filter(post_exists=True).count()
23 |
24 |
25 | class TestExists(TestCase):
26 | def setUp(self):
27 | self.author = Author.objects.create(name="author")
28 | self.post = Post.objects.create(title="foo", author=self.author)
29 |
30 | def test_with_count(self):
31 | Post.objects.annotate(
32 | post_exists=Exists(Post.objects.all())
33 | ).filter(post_exists=True).count()
34 |
35 | @skipUnless(DJANGO3, "Django 3 specific tests")
36 | def test_with_case_when(self):
37 | author = Author.objects.annotate(
38 | has_post=Case(
39 | When(Exists(Post.objects.filter(author=OuterRef('pk')).values('pk')), then=Value(1)),
40 | default=Value(0),
41 | output_field=IntegerField(),
42 | )
43 | ).get()
44 | self.assertEqual(author.has_post, 1)
45 |
46 | @skipUnless(DJANGO3, "Django 3 specific tests")
47 | def test_order_by_exists(self):
48 | author_without_posts = Author.objects.create(name="other author")
49 | authors_by_posts = Author.objects.order_by(Exists(Post.objects.filter(author=OuterRef('pk'))).desc())
50 | self.assertSequenceEqual(authors_by_posts, [self.author, author_without_posts])
51 |
52 | authors_by_posts = Author.objects.order_by(Exists(Post.objects.filter(author=OuterRef('pk'))).asc())
53 | self.assertSequenceEqual(authors_by_posts, [author_without_posts, self.author])
54 |
--------------------------------------------------------------------------------
/testapp/models.py:
--------------------------------------------------------------------------------
1 | import uuid
2 |
3 | from django.db import models
4 | from django.utils import timezone
5 |
6 |
7 | class Author(models.Model):
8 | name = models.CharField(max_length=100)
9 |
10 |
11 | class Editor(models.Model):
12 | name = models.CharField(max_length=100)
13 |
14 |
15 | class Post(models.Model):
16 | title = models.CharField('title', max_length=255)
17 | author = models.ForeignKey(Author, models.CASCADE)
18 | # Optional secondary author
19 | alt_editor = models.ForeignKey(Editor, models.SET_NULL, blank=True, null=True)
20 |
21 | class Meta:
22 | unique_together = (
23 | ('author', 'title', 'alt_editor'),
24 | )
25 |
26 | def __str__(self):
27 | return self.title
28 |
29 |
30 | class Comment(models.Model):
31 | post = models.ForeignKey(Post, on_delete=models.CASCADE)
32 | text = models.TextField('text')
33 | created_at = models.DateTimeField(default=timezone.now)
34 |
35 | def __str__(self):
36 | return self.text
37 |
38 |
39 | class UUIDModel(models.Model):
40 | id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
41 |
42 | def __str__(self):
43 | return str(self.pk)
44 |
45 |
46 | class TestUniqueNullableModel(models.Model):
47 | # Issue #38:
48 | # This field started off as unique=True *and* null=True so it is implemented with a filtered unique index
49 | # Then it is made non-nullable by a subsequent migration, to check this is correctly handled (the index
50 | # should be dropped, then a normal unique constraint should be added, now that the column is not nullable)
51 | test_field = models.CharField(max_length=100, unique=True)
52 |
53 | # Issue #45 (case 1)
54 | # Field used for testing changing the 'type' of a field that's both unique & nullable
55 | x = models.CharField(max_length=11, null=True, unique=True)
56 |
57 |
58 | class TestNullableUniqueTogetherModel(models.Model):
59 | class Meta:
60 | unique_together = (('a', 'b', 'c'),)
61 |
62 | # Issue #45 (case 2)
63 | # Fields used for testing changing the 'type' of a field that is in a `unique_together`
64 | a = models.CharField(max_length=51, null=True)
65 | b = models.CharField(max_length=50)
66 | c = models.CharField(max_length=50)
67 |
68 |
69 | class TestRemoveOneToOneFieldModel(models.Model):
70 | # Fields used for testing removal of a OneToOne field. Verifies that delete_unique does not
71 | # try to remove indexes that have already been removed.
72 | # b = models.OneToOneField('self', on_delete=models.SET_NULL, null=True)
73 | a = models.CharField(max_length=50)
74 |
--------------------------------------------------------------------------------
/testapp/migrations/0001_initial.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 2.2.8.dev20191112211527 on 2019-11-15 01:38
2 |
3 | import uuid
4 |
5 | from django.db import migrations, models
6 | import django.db.models.deletion
7 | import django.utils.timezone
8 |
9 | class Migration(migrations.Migration):
10 |
11 | initial = True
12 |
13 | dependencies = [
14 | ]
15 |
16 | operations = [
17 | migrations.CreateModel(
18 | name='Author',
19 | fields=[
20 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
21 | ('name', models.CharField(max_length=100)),
22 | ],
23 | ),
24 | migrations.CreateModel(
25 | name='Editor',
26 | fields=[
27 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
28 | ('name', models.CharField(max_length=100)),
29 | ],
30 | ),
31 | migrations.CreateModel(
32 | name='Post',
33 | fields=[
34 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
35 | ('title', models.CharField(max_length=255, verbose_name='title')),
36 | ],
37 | ),
38 | migrations.AddField(
39 | model_name='post',
40 | name='alt_editor',
41 | field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='testapp.Editor'),
42 | ),
43 | migrations.AddField(
44 | model_name='post',
45 | name='author',
46 | field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='testapp.Author'),
47 | ),
48 | migrations.AlterUniqueTogether(
49 | name='post',
50 | unique_together={('author', 'title', 'alt_editor')},
51 | ),
52 | migrations.CreateModel(
53 | name='Comment',
54 | fields=[
55 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
56 | ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='testapp.Post')),
57 | ('text', models.TextField(verbose_name='text')),
58 | ('created_at', models.DateTimeField(default=django.utils.timezone.now)),
59 | ],
60 | ),
61 | migrations.CreateModel(
62 | name='UUIDModel',
63 | fields=[
64 | ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
65 | ],
66 | ),
67 | ]
68 |
--------------------------------------------------------------------------------
/sql_server/pyodbc/features.py:
--------------------------------------------------------------------------------
1 | from django.db.backends.base.features import BaseDatabaseFeatures
2 | from django.utils.functional import cached_property
3 |
4 |
5 | class DatabaseFeatures(BaseDatabaseFeatures):
6 | can_introspect_json_field = False
7 | has_native_json_field = False
8 | has_native_uuid_field = False
9 | allow_sliced_subqueries_with_in = False
10 | can_introspect_autofield = True
11 | can_introspect_small_integer_field = True
12 | can_return_columns_from_insert = True
13 | can_return_id_from_insert = True
14 | can_use_chunked_reads = False
15 | for_update_after_from = True
16 | greatest_least_ignores_nulls = True
17 | has_real_datatype = True
18 | has_select_for_update = True
19 | has_select_for_update_nowait = True
20 | has_select_for_update_skip_locked = True
21 | has_zoneinfo_database = False
22 | ignores_table_name_case = True
23 | ignores_quoted_identifier_case = True
24 | requires_literal_defaults = True
25 | requires_sqlparse_for_splitting = False
26 | supports_boolean_expr_in_select_clause = False
27 | supports_deferrable_unique_constraints = False
28 | supports_ignore_conflicts = False
29 | supports_index_on_text_field = False
30 | supports_paramstyle_pyformat = False
31 | supports_regex_backreferencing = True
32 | supports_sequence_reset = False
33 | supports_subqueries_in_group_by = False
34 | supports_tablespaces = True
35 | supports_temporal_subtraction = True
36 | supports_timezones = False
37 | supports_transactions = True
38 | uses_savepoints = True
39 | supports_order_by_nulls_modifier = False
40 | supports_order_by_is_nulls = False
41 | order_by_nulls_first = True
42 |
43 | @cached_property
44 | def has_bulk_insert(self):
45 | return self.connection.sql_server_version > 2005
46 |
47 | @cached_property
48 | def supports_nullable_unique_constraints(self):
49 | return self.connection.sql_server_version > 2005
50 |
51 | @cached_property
52 | def supports_partially_nullable_unique_constraints(self):
53 | return self.connection.sql_server_version > 2005
54 |
55 | @cached_property
56 | def supports_partial_indexes(self):
57 | return self.connection.sql_server_version > 2005
58 |
59 | @cached_property
60 | def supports_functions_in_partial_indexes(self):
61 | return self.connection.sql_server_version > 2005
62 |
63 | @cached_property
64 | def introspected_field_types(self):
65 | return {
66 | **super().introspected_field_types,
67 | 'GenericIPAddressField': 'CharField',
68 | 'PositiveBigIntegerField': 'BigIntegerField'
69 | }
70 |
--------------------------------------------------------------------------------
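These flags are what Django's skipUnlessDBFeature decorator consults; testapp/tests/test_constraints.py below gates its cases the same way. A minimal sketch of the pattern against one of the version-dependent properties above (the test class itself is hypothetical):

    from django.db import connection
    from django.test import TestCase, skipUnlessDBFeature


    @skipUnlessDBFeature('supports_partial_indexes')
    class PartialIndexFeatureTests(TestCase):
        def test_feature_is_reported(self):
            # Per the cached_property above, this is True whenever the
            # connected server is newer than SQL Server 2005
            self.assertTrue(connection.features.supports_partial_indexes)
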
/test.sh:
--------------------------------------------------------------------------------
1 | # TODO:
2 | #
3 | # * m2m_through_regress
4 | # * many_to_one_null
5 |
6 | set -e
7 |
8 | DJANGO_VERSION="$(python -m django --version)"
9 |
10 | cd django
11 | git fetch -q --depth=1 origin +refs/tags/*:refs/tags/*
12 | git checkout -q $DJANGO_VERSION
13 | pip install -q -r tests/requirements/py3.txt
14 |
15 | python tests/runtests.py --settings=testapp.settings --noinput --keepdb \
16 | aggregation \
17 | aggregation_regress \
18 | annotations \
19 | backends \
20 | basic \
21 | bulk_create \
22 | constraints \
23 | custom_columns \
24 | custom_lookups \
25 | custom_managers \
26 | custom_methods \
27 | custom_migration_operations \
28 | custom_pk \
29 | datatypes \
30 | dates \
31 | datetimes \
32 | db_functions \
33 | db_typecasts \
34 | db_utils \
35 | dbshell \
36 | defer \
37 | defer_regress \
38 | delete \
39 | delete_regress \
40 | distinct_on_fields \
41 | empty \
42 | expressions \
43 | expressions_case \
44 | expressions_window \
45 | extra_regress \
46 | field_deconstruction \
47 | field_defaults \
48 | field_subclassing \
49 | filtered_relation \
50 | fixtures \
51 | fixtures_model_package \
52 | fixtures_regress \
53 | force_insert_update \
54 | foreign_object \
55 | from_db_value \
56 | generic_relations \
57 | generic_relations_regress \
58 | get_earliest_or_latest \
59 | get_object_or_404 \
60 | get_or_create \
61 | indexes \
62 | inspectdb \
63 | introspection \
64 | invalid_models_tests \
65 | known_related_objects \
66 | lookup \
67 | m2m_and_m2o \
68 | m2m_intermediary \
69 | m2m_multiple \
70 | m2m_recursive \
71 | m2m_regress \
72 | m2m_signals \
73 | m2m_through \
74 | m2o_recursive \
75 | managers_regress \
76 | many_to_many \
77 | many_to_one \
78 | max_lengths \
79 | migrate_signals \
80 | model_fields \
81 | model_indexes \
82 | model_options \
83 | mutually_referential \
84 | nested_foreign_keys \
85 | null_fk \
86 | null_fk_ordering \
87 | null_queries \
88 | one_to_one \
89 | or_lookups \
90 | order_with_respect_to \
91 | ordering \
92 | pagination \
93 | prefetch_related \
94 | queries \
95 | queryset_pickle \
96 | raw_query \
97 | reverse_lookup \
98 | save_delete_hooks \
99 | schema \
100 | select_for_update \
101 | select_related \
102 | select_related_onetoone \
103 | select_related_regress \
104 | transaction_hooks \
105 | transactions \
106 | update \
107 | update_only_fields
108 |
--------------------------------------------------------------------------------
/testapp/tests/test_constraints.py:
--------------------------------------------------------------------------------
1 | from django.db.utils import IntegrityError
2 | from django.test import TestCase, skipUnlessDBFeature
3 |
4 | from ..models import (
5 | Author, Editor, Post,
6 | TestUniqueNullableModel, TestNullableUniqueTogetherModel,
7 | )
8 |
9 |
10 | @skipUnlessDBFeature('supports_nullable_unique_constraints')
11 | class TestNullableUniqueColumn(TestCase):
12 | def test_multiple_nulls(self):
13 | # Issue #45 (case 1) - after field `x` has had its type changed, the filtered UNIQUE
14 | # INDEX which is implementing the nullable unique constraint should still be correctly
15 | # in place - i.e. allowing multiple NULLs but still enforcing uniqueness of non-NULLs
16 |
17 | # Allowed
18 | TestUniqueNullableModel.objects.create(x=None, test_field='randomness')
19 | TestUniqueNullableModel.objects.create(x=None, test_field='doesntmatter')
20 |
21 | # Disallowed
22 | TestUniqueNullableModel.objects.create(x="foo", test_field='irrelevant')
23 | with self.assertRaises(IntegrityError):
24 | TestUniqueNullableModel.objects.create(x="foo", test_field='nonsense')
25 |
26 |
27 | @skipUnlessDBFeature('supports_partially_nullable_unique_constraints')
28 | class TestPartiallyNullableUniqueTogether(TestCase):
29 | def test_partially_nullable(self):
30 | # Check basic behaviour of `unique_together` where at least 1 of the columns is nullable
31 |
32 | # It should be possible to have 2 rows both with NULL `alt_editor`
33 | author = Author.objects.create(name="author")
34 | Post.objects.create(title="foo", author=author)
35 | Post.objects.create(title="foo", author=author)
36 |
37 | # But `unique_together` is still enforced for non-NULL values
38 | editor = Editor.objects.create(name="editor")
39 | Post.objects.create(title="foo", author=author, alt_editor=editor)
40 | with self.assertRaises(IntegrityError):
41 | Post.objects.create(title="foo", author=author, alt_editor=editor)
42 |
43 | def test_after_type_change(self):
44 | # Issue #45 (case 2) - after one of the fields in the `unique_together` has had its
45 | # type changed in a migration, the constraint should still be correctly enforced
46 |
47 | # Multiple rows with a=NULL are considered different
48 | TestNullableUniqueTogetherModel.objects.create(a=None, b='bbb', c='ccc')
49 | TestNullableUniqueTogetherModel.objects.create(a=None, b='bbb', c='ccc')
50 |
51 | # Uniqueness still enforced for non-NULL values
52 | TestNullableUniqueTogetherModel.objects.create(a='aaa', b='bbb', c='ccc')
53 | with self.assertRaises(IntegrityError):
54 | TestNullableUniqueTogetherModel.objects.create(a='aaa', b='bbb', c='ccc')
55 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: required
2 | language: python
3 | dist: xenial
4 | cache: pip
5 |
6 | branches:
7 | only:
8 | - master
9 |
10 | env:
11 | global:
12 | - PYTHONPATH=$PYTHONPATH:$TRAVIS_BUILD_DIR/django
13 | - DATABASE_URL="mssql://SA:MyPassword42@localhost:1433/default?isolation_level=read committed&driver=ODBC Driver 17 for SQL Server"
14 | - DATABASE_URL_OTHER="mssql://SA:MyPassword42@localhost:1433/other?isolation_level=read committed&driver=ODBC Driver 17 for SQL Server"
15 |
16 | services: docker
17 |
18 | templates:
19 | linux_before_install: &linux_before_install
20 | - docker pull mcr.microsoft.com/mssql/server:2017-latest-ubuntu
21 | - docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=MyPassword42' -p 1433:1433 -d mcr.microsoft.com/mssql/server:2017-latest-ubuntu
22 | - curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
23 | - curl https://packages.microsoft.com/config/ubuntu/16.04/prod.list | sudo tee /etc/apt/sources.list.d/mssql-release.list
24 | - sudo apt-get update
25 | - sudo ACCEPT_EULA=Y apt-get install -y msodbcsql17 g++ unixodbc-dev
26 |
27 | win_before_install: &win_before_install
28 | - docker pull christianacca/mssql-server-windows-express:1803
29 | - docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=MyPassword42' -p 1433:1433 -d christianacca/mssql-server-windows-express:1803
30 | - wget https://download.microsoft.com/download/E/6/B/E6BFDC7A-5BCD-4C51-9912-635646DA801E/en-US/msodbcsql_17.3.1.1_x64.msi
31 | - powershell "Start-Process msiexec.exe -Wait -ArgumentList '/I msodbcsql_17.3.1.1_x64.msi /qn /norestart IACCEPTMSODBCSQLLICENSETERMS=YES'"
32 | - choco install python3 --version 3.7.2
33 | - export PATH="/c/Python37:/c/Python37/Scripts:$PATH"
34 |
35 | matrix:
36 | include:
37 | - env: FLAKE8
38 | python: "3.7"
39 | install: pip install flake8==3.7.1
40 | script: flake8
41 |
42 | - { before_install: *linux_before_install, python: "3.6", os: linux, env: TOX_ENV=py36-django22 }
43 | - { before_install: *linux_before_install, python: "3.6", os: linux, env: TOX_ENV=py36-django30 }
44 | - { before_install: *linux_before_install, python: "3.6", os: linux, env: TOX_ENV=py36-django31 }
45 |
46 | - { before_install: *linux_before_install, python: "3.7", os: linux, env: TOX_ENV=py37-django22 }
47 | - { before_install: *linux_before_install, python: "3.7", os: linux, env: TOX_ENV=py37-django30 }
48 | - { before_install: *linux_before_install, python: "3.7", os: linux, env: TOX_ENV=py37-django31 }
49 |
50 | - { before_install: *linux_before_install, python: "3.8", os: linux, env: TOX_ENV=py38-django30 }
51 | - { before_install: *linux_before_install, python: "3.8", os: linux, env: TOX_ENV=py38-django31 }
52 |
53 | - { before_install: *win_before_install, language: sh, python: "3.6", os: windows, env: TOX_ENV=py36-django22 }
54 | - { before_install: *win_before_install, language: sh, python: "3.6", os: windows, env: TOX_ENV=py36-django30 }
55 | - { before_install: *win_before_install, language: sh, python: "3.6", os: windows, env: TOX_ENV=py36-django31 }
56 |
57 | - { before_install: *win_before_install, language: sh, python: "3.7", os: windows, env: TOX_ENV=py37-django22 }
58 | - { before_install: *win_before_install, language: sh, python: "3.7", os: windows, env: TOX_ENV=py37-django30 }
59 | - { before_install: *win_before_install, language: sh, python: "3.7", os: windows, env: TOX_ENV=py37-django31 }
60 |
61 | - { before_install: *win_before_install, language: sh, python: "3.8", os: windows, env: TOX_ENV=py38-django30 }
62 | - { before_install: *win_before_install, language: sh, python: "3.8", os: windows, env: TOX_ENV=py38-django31 }
63 |
64 |
65 |
66 | install:
67 | - python -m pip install --upgrade pip wheel setuptools
68 | - pip install tox tox-travis tox-venv
69 | - git clone https://github.com/django/django.git
70 |
71 | script:
72 | - tox -e $TOX_ENV
73 |
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | name: Tests
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | pull_request:
8 | branches:
9 | - master
10 |
11 | env:
12 | DATABASE_URL: "mssql://SA:MyPassword42@localhost:1433/default?isolation_level=read committed&driver=ODBC Driver 17 for SQL Server"
13 | DATABASE_URL_OTHER: "mssql://SA:MyPassword42@localhost:1433/other?isolation_level=read committed&driver=ODBC Driver 17 for SQL Server"
14 |
15 | jobs:
16 | linting:
17 | runs-on: ubuntu-latest
18 | steps:
19 | - uses: actions/checkout@v1
20 |
21 | - name: Set up Python
22 | uses: actions/setup-python@v1
23 | with:
24 | python-version: "3.8"
25 |
26 | - name: Install
27 | run: |
28 | python -m pip install --upgrade pip
29 | pip install flake8
30 | - name: Linting
31 | run: |
32 | flake8 --exclude testapp
33 |
34 | build:
35 | runs-on: ${{ matrix.os }}
36 |
37 | strategy:
38 | fail-fast: false
39 | matrix:
40 | os: [ubuntu-latest, windows-latest]
41 | tox_env:
42 | - "py36-django22"
43 | - "py36-django30"
44 | - "py36-django31"
45 |
46 | - "py37-django22"
47 | - "py37-django30"
48 | - "py37-django31"
49 |
50 | - "py38-django30"
51 | - "py38-django31"
52 |
53 | include:
54 | - python: "3.6"
55 | tox_env: "py36-django22"
56 |
57 | - python: "3.6"
58 | tox_env: "py36-django30"
59 |
60 | - python: "3.6"
61 | tox_env: "py36-django31"
62 |
63 | - python: "3.7"
64 | tox_env: "py37-django22"
65 |
66 | - python: "3.7"
67 | tox_env: "py37-django30"
68 |
69 | - python: "3.7"
70 | tox_env: "py37-django31"
71 |
72 | - python: "3.8"
73 | tox_env: "py38-django30"
74 |
75 | - python: "3.8"
76 | tox_env: "py38-django31"
77 |
78 |
79 | steps:
80 | - uses: actions/checkout@v2
81 | - uses: actions/checkout@v2
82 | with:
83 | repository: django/django
84 | path: django
85 | - name: Set up Python
86 | uses: actions/setup-python@v1
87 | with:
88 | python-version: ${{ matrix.python }}
89 |
90 | - name: Install Linux deps
91 | if: matrix.os == 'ubuntu-latest'
92 | run: |
93 | curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
94 | curl https://packages.microsoft.com/config/ubuntu/16.04/prod.list | sudo tee /etc/apt/sources.list.d/mssql-release.list
95 | sudo apt-get update
96 | sudo ACCEPT_EULA=Y apt-get install -y msodbcsql17 g++ unixodbc-dev libmemcached-dev
97 |
98 | - name: Install Windows deps
99 | if: matrix.os == 'windows-latest'
100 | run: |
101 | powershell wget https://download.microsoft.com/download/E/6/B/E6BFDC7A-5BCD-4C51-9912-635646DA801E/en-US/msodbcsql_17.3.1.1_x64.msi -OutFile msodbcsql_17.3.1.1_x64.msi
102 | powershell "Start-Process msiexec.exe -Wait -ArgumentList '/I msodbcsql_17.3.1.1_x64.msi /qn /norestart IACCEPTMSODBCSQLLICENSETERMS=YES'"
103 |
104 | - name: Install
105 | run: |
106 | python -m pip install --upgrade pip wheel setuptools
107 | pip install tox tox-venv
108 |
109 | - name: Test Linux
110 | if: matrix.os == 'ubuntu-latest'
111 | run: |
112 | docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=MyPassword42' -p 1433:1433 -d mcr.microsoft.com/mssql/server:2017-latest-ubuntu
113 | tox -e ${{ matrix.tox_env }}
114 |
115 | - name: Test Windows
116 | if: matrix.os == 'windows-latest'
117 | run: |
118 | docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=MyPassword42' -p 1433:1433 -d christianacca/mssql-server-windows-express:1809
119 | tox -e ${{ matrix.tox_env }}
120 |
--------------------------------------------------------------------------------
/sql_server/pyodbc/creation.py:
--------------------------------------------------------------------------------
1 | import binascii
2 | import os
3 |
4 | import django
5 | from django.db.backends.base.creation import BaseDatabaseCreation
6 |
7 |
8 | class DatabaseCreation(BaseDatabaseCreation):
9 | @property
10 | def cursor(self):
11 | if django.VERSION >= (3, 1):
12 | return self.connection._nodb_cursor
13 |
14 | return self.connection._nodb_connection.cursor
15 |
16 | def _destroy_test_db(self, test_database_name, verbosity):
17 | """
18 | Internal implementation - remove the test db tables.
19 | """
20 | # Remove the test database to clean up after
21 | # ourselves. Connect to the previous database (not the test database)
22 | # to do so, because it's not allowed to delete a database while being
23 | # connected to it.
24 | with self.cursor() as cursor:
25 | to_azure_sql_db = self.connection.to_azure_sql_db
26 | if not to_azure_sql_db:
27 | cursor.execute("ALTER DATABASE %s SET SINGLE_USER WITH ROLLBACK IMMEDIATE"
28 | % self.connection.ops.quote_name(test_database_name))
29 | cursor.execute("DROP DATABASE %s"
30 | % self.connection.ops.quote_name(test_database_name))
31 |
32 | def sql_table_creation_suffix(self):
33 | suffix = []
34 | collation = self.connection.settings_dict['TEST'].get('COLLATION', None)
35 | if collation:
36 | suffix.append('COLLATE %s' % collation)
37 | return ' '.join(suffix)
38 |
39 | # The following code to add regex support in SQLServer is taken from django-mssql
40 | # see https://bitbucket.org/Manfre/django-mssql
41 | def enable_clr(self):
42 | """ Enables clr for server if not already enabled
43 | This function will not fail if current user doesn't have
44 | permissions to enable clr, and clr is already enabled
45 | """
46 | with self.cursor() as cursor:
47 | # check whether clr is enabled
48 | cursor.execute('''
49 | SELECT value FROM sys.configurations
50 | WHERE name = 'clr enabled'
51 | ''')
52 | res = None
53 | try:
54 | res = cursor.fetchone()
55 | except Exception:
56 | pass
57 |
58 | if not res or not res[0]:
59 | # if not enabled enable clr
60 | cursor.execute("sp_configure 'clr enabled', 1")
61 | cursor.execute("RECONFIGURE")
62 |
63 | cursor.execute("sp_configure 'show advanced options', 1")
64 | cursor.execute("RECONFIGURE")
65 |
66 | cursor.execute("sp_configure 'clr strict security', 0")
67 | cursor.execute("RECONFIGURE")
68 |
69 | def install_regex_clr(self, database_name):
70 | sql = '''
71 | USE {database_name};
72 | -- Drop and recreate the function if it already exists
73 | IF OBJECT_ID('REGEXP_LIKE') IS NOT NULL
74 | DROP FUNCTION [dbo].[REGEXP_LIKE]
75 | IF EXISTS(select * from sys.assemblies where name like 'regex_clr')
76 | DROP ASSEMBLY regex_clr
77 | ;
78 | CREATE ASSEMBLY regex_clr
79 | FROM 0x{assembly_hex}
80 | WITH PERMISSION_SET = SAFE;
81 | create function [dbo].[REGEXP_LIKE]
82 | (
83 | @input nvarchar(max),
84 | @pattern nvarchar(max),
85 | @caseSensitive int
86 | )
87 | RETURNS INT AS
88 | EXTERNAL NAME regex_clr.UserDefinedFunctions.REGEXP_LIKE
89 | '''.format(
90 | database_name=self.connection.ops.quote_name(database_name),
91 | assembly_hex=self.get_regex_clr_assembly_hex(),
92 | ).split(';')
93 |
94 | self.enable_clr()
95 |
96 | with self.cursor() as cursor:
97 | for s in sql:
98 | cursor.execute(s)
99 |
100 | def get_regex_clr_assembly_hex(self):
101 | with open(os.path.join(os.path.dirname(__file__), 'regex_clr.dll'), 'rb') as f:
102 | return binascii.hexlify(f.read()).decode('ascii')
103 |
--------------------------------------------------------------------------------
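install_regex_clr() registers the assembly and exposes it as the scalar function dbo.REGEXP_LIKE(@input, @pattern, @caseSensitive) returning INT, per the T-SQL above. Once installed, the backend can service Django's regex lookups; a minimal sketch, assuming the lookup-to-REGEXP_LIKE mapping lives in operations.py (not included in this section):

    # Author is the test model defined in testapp/models.py
    from testapp.models import Author

    matches = Author.objects.filter(name__regex=r'^auth')
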
/sql_server/pyodbc/functions.py:
--------------------------------------------------------------------------------
1 | from django import VERSION
2 | from django.db.models import BooleanField
3 | from django.db.models.functions import Cast
4 | from django.db.models.functions.math import ATan2, Log, Ln, Mod, Round
5 | from django.db.models.expressions import Case, Exists, OrderBy, When
6 | from django.db.models.lookups import Lookup
7 |
8 | DJANGO3 = VERSION[0] >= 3
9 |
10 |
11 | class TryCast(Cast):
12 | function = 'TRY_CAST'
13 |
14 |
15 | def sqlserver_as_sql(self, compiler, connection, template=None, **extra_context):
16 | template = template or self.template
17 | if connection.features.supports_order_by_nulls_modifier:
18 | if self.nulls_last:
19 | template = '%s NULLS LAST' % template
20 | elif self.nulls_first:
21 | template = '%s NULLS FIRST' % template
22 | else:
23 | if self.nulls_last and not (
24 | self.descending and connection.features.order_by_nulls_first
25 | ) and connection.features.supports_order_by_is_nulls:
26 | template = '%%(expression)s IS NULL, %s' % template
27 | elif self.nulls_first and not (
28 | not self.descending and connection.features.order_by_nulls_first
29 | ) and connection.features.supports_order_by_is_nulls:
30 | template = '%%(expression)s IS NOT NULL, %s' % template
31 | connection.ops.check_expression_support(self)
32 | expression_sql, params = compiler.compile(self.expression)
33 | placeholders = {
34 | 'expression': expression_sql,
35 | 'ordering': 'DESC' if self.descending else 'ASC',
36 | **extra_context,
37 | }
38 | template = template or self.template
39 | params *= template.count('%(expression)s')
40 | return (template % placeholders).rstrip(), params
41 |
42 |
43 | def sqlserver_atan2(self, compiler, connection, **extra_context):
44 | return self.as_sql(compiler, connection, function='ATN2', **extra_context)
45 |
46 |
47 | def sqlserver_log(self, compiler, connection, **extra_context):
48 | clone = self.copy()
49 | clone.set_source_expressions(self.get_source_expressions()[::-1])
50 | return clone.as_sql(compiler, connection, **extra_context)
51 |
52 |
53 | def sqlserver_ln(self, compiler, connection, **extra_context):
54 | return self.as_sql(compiler, connection, function='LOG', **extra_context)
55 |
56 |
57 | def sqlserver_mod(self, compiler, connection, **extra_context):
58 | return self.as_sql(compiler, connection, template='%(expressions)s', arg_joiner='%%', **extra_context)
59 |
60 |
61 | def sqlserver_round(self, compiler, connection, **extra_context):
62 | return self.as_sql(compiler, connection, template='%(function)s(%(expressions)s, 0)', **extra_context)
63 |
64 |
65 | def sqlserver_exists(self, compiler, connection, template=None, **extra_context):
66 | # MS SQL doesn't allow EXISTS() in the SELECT list, so wrap it with a
67 | # CASE WHEN expression. Change the template since the When expression
68 | # requires a left hand side (column) to compare against.
69 | sql, params = self.as_sql(compiler, connection, template, **extra_context)
70 | sql = 'CASE WHEN {} THEN 1 ELSE 0 END'.format(sql)
71 | return sql, params
72 |
73 |
74 | def sqlserver_lookup(self, compiler, connection):
75 | # MSSQL doesn't allow EXISTS() to be compared to another expression
76 | # unless it's wrapped in a CASE WHEN.
77 | wrapped = False
78 | exprs = []
79 | for expr in (self.lhs, self.rhs):
80 | if isinstance(expr, Exists):
81 | expr = Case(When(expr, then=True), default=False, output_field=BooleanField())
82 | wrapped = True
83 | exprs.append(expr)
84 | lookup = type(self)(*exprs) if wrapped else self
85 | return lookup.as_sql(compiler, connection)
86 |
87 |
88 | def sqlserver_orderby(self, compiler, connection):
89 | # MSSQL doesn't allow ORDER BY EXISTS() unless it's wrapped in
90 | # a CASE WHEN.
91 |
92 | template = None
93 | if self.nulls_last:
94 | template = 'CASE WHEN %(expression)s IS NULL THEN 1 ELSE 0 END, %(expression)s %(ordering)s'
95 | if self.nulls_first:
96 | template = 'CASE WHEN %(expression)s IS NULL THEN 0 ELSE 1 END, %(expression)s %(ordering)s'
97 |
98 | if isinstance(self.expression, Exists):
99 | copy = self.copy()
100 | copy.expression = Case(
101 | When(self.expression, then=True),
102 | default=False,
103 | output_field=BooleanField(),
104 | )
105 | return copy.as_sql(compiler, connection, template=template)
106 | return self.as_sql(compiler, connection, template=template)
107 |
108 |
109 | ATan2.as_microsoft = sqlserver_atan2
110 | Log.as_microsoft = sqlserver_log
111 | Ln.as_microsoft = sqlserver_ln
112 | Mod.as_microsoft = sqlserver_mod
113 | Round.as_microsoft = sqlserver_round
114 |
115 | if DJANGO3:
116 | Lookup.as_microsoft = sqlserver_lookup
117 | else:
118 | Exists.as_microsoft = sqlserver_exists
119 |
120 | OrderBy.as_microsoft = sqlserver_orderby
121 | OrderBy.as_sql = sqlserver_as_sql
122 |
--------------------------------------------------------------------------------
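The assignments above rely on Django's vendor dispatch: the SQL compiler looks for an as_<connection.vendor> method on each expression before falling back to as_sql(), and this backend reports its vendor as "microsoft" (set in base.py, which is not included in this section). Third-party expressions can hook the same mechanism; a minimal, hypothetical sketch:

    from django.db.models import Func


    class TrimBoth(Func):
        """Hypothetical expression: TRIM() by default, emulated on MSSQL."""
        function = 'TRIM'

        def as_microsoft(self, compiler, connection, **extra_context):
            # SQL Server gained TRIM() only in 2017; LTRIM(RTRIM(...))
            # works on every supported version
            return self.as_sql(
                compiler, connection,
                template='LTRIM(RTRIM(%(expressions)s))',
                **extra_context,
            )
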
/README.rst:
--------------------------------------------------------------------------------
1 | django-mssql-backend
2 | ====================
3 |
4 | .. image:: https://img.shields.io/pypi/v/django-mssql-backend.svg
5 | :target: https://pypi.python.org/pypi/django-mssql-backend
6 |
7 | *django-mssql-backend* is a fork of
8 | `django-pyodbc-azure <https://github.com/michiya/django-pyodbc-azure>`__
9 |
10 | Features
11 | --------
12 |
13 | - Supports Django 2.2, 3.0
14 | - Supports Microsoft SQL Server 2008/2008R2, 2012, 2014, 2016, 2017, 2019
15 | - Passes most of the tests of the Django test suite
16 | - Compatible with
17 | `Microsoft ODBC Driver for SQL Server <https://docs.microsoft.com/en-us/sql/connect/odbc/microsoft-odbc-driver-for-sql-server>`__,
18 | `SQL Server Native Client <https://docs.microsoft.com/en-us/sql/relational-databases/native-client/sql-server-native-client>`__,
19 | and `FreeTDS <https://www.freetds.org>`__ ODBC drivers
20 |
21 | Dependencies
22 | ------------
23 |
24 | - Django 2.2 or newer
25 | - pyodbc 3.0 or newer
26 |
27 | Installation
28 | ------------
29 |
30 | 1. Install pyodbc and Django
31 |
32 | 2. Install django-mssql-backend ::
33 |
34 | pip install django-mssql-backend
35 |
36 | 3. Now you can point the ``ENGINE`` setting in the settings file used by
37 | your Django application or project to the ``'sql_server.pyodbc'``
38 | module path ::
39 |
40 | 'ENGINE': 'sql_server.pyodbc'
41 |
42 | Regex Support
43 | -------------
44 |
45 | django-mssql-backend supports regex using a CLR .dll file. To install it, run ::
46 |
47 | python manage.py install_regex_clr {database_name}
48 |
49 | Configuration
50 | -------------
51 |
52 | Standard Django settings
53 | ~~~~~~~~~~~~~~~~~~~~~~~~
54 |
55 | The following entries in a database-level settings dictionary
56 | in DATABASES control the behavior of the backend:
57 |
58 | - ENGINE
59 |
60 | String. It must be ``"sql_server.pyodbc"``.
61 |
62 | - NAME
63 |
64 | String. Database name. Required.
65 |
66 | - HOST
67 |
68 | String. SQL Server instance in ``"server\instance"`` format.
69 |
70 | - PORT
71 |
72 | String. Server instance port.
73 | An empty string means the default port.
74 |
75 | - USER
76 |
77 | String. Database user name in ``"user"`` format.
78 | If not given then MS Integrated Security will be used.
79 |
80 | - PASSWORD
81 |
82 | String. Database user password.
83 |
84 | - AUTOCOMMIT
85 |
86 | Boolean. Set this to False if you want to disable
87 | Django's transaction management and implement your own.
88 |
89 | and the following entries are also available in the TEST dictionary
90 | for any given database-level settings dictionary:
91 |
92 | - NAME
93 |
94 | String. The name of database to use when running the test suite.
95 | If the default value (``None``) is used, the test database will use
96 | the name "test\_" + ``NAME``.
97 |
98 | - COLLATION
99 |
100 | String. The collation order to use when creating the test database.
101 | If the default value (``None``) is used, the test database is assigned
102 | the default collation of the instance of SQL Server.
103 |
104 | - DEPENDENCIES
105 |
106 | String. The creation-order dependencies of the database.
107 | See the official Django documentation for more details.
108 |
109 | - MIRROR
110 |
111 | String. The alias of the database that this database should
112 | mirror during testing. Default value is ``None``.
113 | See the official Django documentation for more details.
114 |
115 | OPTIONS
116 | ~~~~~~~
117 |
118 | Dictionary. Currently available keys are:
119 |
120 | - driver
121 |
122 | String. ODBC Driver to use (``"ODBC Driver 13 for SQL Server"``,
123 | ``"SQL Server Native Client 11.0"``, ``"FreeTDS"`` etc).
124 | Default is ``"ODBC Driver 13 for SQL Server"``.
125 |
126 | - isolation_level
127 |
128 | String. Sets `transaction isolation level
129 | <https://docs.microsoft.com/en-us/sql/t-sql/statements/set-transaction-isolation-level-transact-sql>`__
130 | for each database session. Valid values for this entry are
131 | ``READ UNCOMMITTED``, ``READ COMMITTED``, ``REPEATABLE READ``,
132 | ``SNAPSHOT``, and ``SERIALIZABLE``. Default is ``None``, which means
133 | no isolation level is set for a database session and the SQL Server
134 | default will be used.
135 |
136 | - dsn
137 |
138 | String. A named DSN can be used instead of ``HOST``.
139 |
140 | - host_is_server
141 |
142 | Boolean. Only relevant if using the FreeTDS ODBC driver under
143 | Unix/Linux.
144 |
145 | By default, when using the FreeTDS ODBC driver the value specified in
146 | the ``HOST`` setting is used in a ``SERVERNAME`` ODBC connection
147 | string component instead of being used in a ``SERVER`` component;
148 | this means that this value should be the name of a *dataserver*
149 | definition present in the ``freetds.conf`` FreeTDS configuration file
150 | instead of a hostname or an IP address.
151 |
152 | But if this option is present and its value is ``True``, this
153 | special behavior is turned off. Instead, connections to the database
154 | server will be established using ``HOST`` and ``PORT`` options, without
155 | requiring ``freetds.conf`` to be configured.
156 |
157 | See https://www.freetds.org/userguide/dsnless.html for more information.
158 |
159 | - unicode_results
160 |
161 | Boolean. If it is set to ``True``, pyodbc's *unicode_results* feature
162 | is activated and strings returned from pyodbc are always Unicode.
163 | Default value is ``False``.
164 |
165 | - extra_params
166 |
167 | String. Additional parameters for the ODBC connection. The format is
168 | ``"param=value;param=value"``.
169 |
170 | - collation
171 |
172 | String. Name of the collation to use when performing text field
173 | lookups against the database. Default is ``None``; this means no
174 | collation specifier is added to your lookup SQL (the default
175 | collation of your database will be used). For Chinese language you
176 | can set it to ``"Chinese_PRC_CI_AS"``.
177 |
178 | - connection_timeout
179 |
180 | Integer. Sets the timeout in seconds for the database connection process.
181 | Default value is ``0`` which disables the timeout.
182 |
183 | - connection_retries
184 |
185 | Integer. Sets the number of times to retry the database connection process.
186 | Default value is ``5``.
187 |
188 | - connection_retry_backoff_time
189 |
190 | Integer. Sets the backoff time in seconds between retries of
191 | the database connection process. Default value is ``5``.
192 |
193 | - query_timeout
194 |
195 | Integer. Sets the timeout in seconds for the database query.
196 | Default value is ``0`` which disables the timeout.
197 |
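For reference, here is a minimal sketch combining several of these keys
(the values shown are illustrative only, not recommendations)::

    'OPTIONS': {
        'driver': 'ODBC Driver 13 for SQL Server',
        'isolation_level': 'READ COMMITTED',
        'connection_timeout': 30,
        'extra_params': 'MARS_Connection=yes',
    },
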
198 | Backend-specific settings
199 | ~~~~~~~~~~~~~~~~~~~~~~~~~
200 |
201 | The following project-level settings also control the behavior of the backend:
202 |
203 | - DATABASE_CONNECTION_POOLING
204 |
205 | Boolean. If it is set to ``False``, pyodbc's connection pooling feature
206 | won't be activated.
207 |
208 | Example
209 | ~~~~~~~
210 |
211 | Here is an example of the database settings:
212 |
213 | ::
214 |
215 | DATABASES = {
216 | 'default': {
217 | 'ENGINE': 'sql_server.pyodbc',
218 | 'NAME': 'mydb',
219 | 'USER': 'user@myserver',
220 | 'PASSWORD': 'password',
221 | 'HOST': 'myserver.database.windows.net',
222 | 'PORT': '',
223 |
224 | 'OPTIONS': {
225 | 'driver': 'ODBC Driver 13 for SQL Server',
226 | },
227 | },
228 | }
229 |
230 | # set this to False if you want to turn off pyodbc's connection pooling
231 | DATABASE_CONNECTION_POOLING = False
232 |
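A connection through a named DSN, instead of a host name, could look
roughly like this (the DSN name is hypothetical)::

    DATABASES = {
        'default': {
            'ENGINE': 'sql_server.pyodbc',
            'NAME': 'mydb',
            'USER': 'user',
            'PASSWORD': 'password',
            'OPTIONS': {
                'dsn': 'mydsn',  # a DSN defined in your ODBC configuration
            },
        },
    }
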
233 | Limitations
234 | -----------
235 |
236 | The following features are currently not supported:
237 |
238 | - Altering a model field from or to AutoField during a migration
239 |
--------------------------------------------------------------------------------
/testapp/settings.py:
--------------------------------------------------------------------------------
1 | import dj_database_url
2 |
3 | DATABASES = {
4 | 'default': dj_database_url.config(default='sqlite:///db.sqlite'),
5 | 'other': dj_database_url.config(env='DATABASE_URL_OTHER', default='sqlite:///db.sqlite'),
6 | }
7 |
8 | INSTALLED_APPS = (
9 | 'django.contrib.contenttypes',
10 | 'django.contrib.staticfiles',
11 | 'django.contrib.auth',
12 | 'sql_server.pyodbc',
13 | 'testapp',
14 | )
15 |
16 |
17 | TEST_RUNNER = 'testapp.runner.ExcludeTestSuiteRunner'
18 | EXCLUDED_TESTS = (
19 | 'aggregation.tests.AggregateTestCase.test_aggregation_subquery_annotation_exists',
20 | 'aggregation.tests.AggregateTestCase.test_aggregation_subquery_annotation_values_collision',
21 | 'aggregation.tests.AggregateTestCase.test_count_star',
22 | 'aggregation.tests.AggregateTestCase.test_distinct_on_aggregate',
23 | 'aggregation.tests.AggregateTestCase.test_expression_on_aggregation',
24 | 'aggregation_regress.tests.AggregationTests.test_annotated_conditional_aggregate',
25 | 'aggregation_regress.tests.AggregationTests.test_annotation_with_value',
26 | 'aggregation_regress.tests.AggregationTests.test_more_more',
27 | 'aggregation_regress.tests.AggregationTests.test_more_more_more',
28 | 'aggregation_regress.tests.AggregationTests.test_ticket_11293',
29 | 'aggregation_regress.tests.AggregationTests.test_values_list_annotation_args_ordering',
30 | 'annotations.tests.NonAggregateAnnotationTestCase.test_annotate_exists',
31 | 'annotations.tests.NonAggregateAnnotationTestCase.test_combined_expression_annotation_with_aggregation',
32 | 'backends.tests.BackendTestCase.test_queries',
33 | 'backends.tests.BackendTestCase.test_unicode_password',
34 | 'backends.tests.FkConstraintsTests.test_disable_constraint_checks_context_manager',
35 | 'backends.tests.FkConstraintsTests.test_disable_constraint_checks_manually',
36 | 'backends.tests.LastExecutedQueryTest.test_last_executed_query',
37 | 'bulk_create.tests.BulkCreateTests.test_bulk_insert_nullable_fields',
38 | 'constraints.tests.CheckConstraintTests.test_abstract_name',
39 | 'constraints.tests.CheckConstraintTests.test_database_constraint',
40 | 'constraints.tests.CheckConstraintTests.test_database_constraint_expression',
41 | 'constraints.tests.CheckConstraintTests.test_database_constraint_expressionwrapper',
42 | 'constraints.tests.CheckConstraintTests.test_name',
43 | 'constraints.tests.UniqueConstraintTests.test_database_constraint',
44 | 'constraints.tests.UniqueConstraintTests.test_database_constraint_with_condition',
45 | 'constraints.tests.UniqueConstraintTests.test_name',
46 | 'custom_lookups.tests.BilateralTransformTests.test_transform_order_by',
47 | 'datatypes.tests.DataTypesTestCase.test_error_on_timezone',
48 | 'datetimes.tests.DateTimesTests.test_datetimes_ambiguous_and_invalid_times',
49 | 'datetimes.tests.DateTimesTests.test_datetimes_returns_available_dates_for_given_scope_and_given_field',
50 | 'datetimes.tests.DateTimesTests.test_related_model_traverse',
51 | 'db_functions.comparison.test_cast.CastTests.test_cast_to_integer',
52 | 'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_func',
53 | 'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_iso_weekday_func',
54 | 'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_year_exact_lookup',
55 | 'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_year_greaterthan_lookup',
56 | 'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_extract_year_lessthan_lookup',
57 | 'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_trunc_func',
58 | 'db_functions.datetime.test_extract_trunc.DateFunctionTests.test_trunc_week_func',
59 | 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_func',
60 | 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_func_with_timezone',
61 | 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_iso_weekday_func',
62 | 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_year_exact_lookup',
63 | 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_year_greaterthan_lookup',
64 | 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_extract_year_lessthan_lookup',
65 | 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_trunc_ambiguous_and_invalid_times',
66 | 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_trunc_func_with_timezone',
67 | 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_trunc_none',
68 | 'db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests.test_trunc_week_func',
69 | 'db_functions.math.test_degrees.DegreesTests.test_integer',
70 | 'db_functions.math.test_mod.ModTests.test_float',
71 | 'db_functions.math.test_power.PowerTests.test_integer',
72 | 'db_functions.math.test_radians.RadiansTests.test_integer',
73 | 'db_functions.text.test_md5',
74 | 'db_functions.text.test_pad.PadTests.test_pad',
75 | 'db_functions.text.test_replace.ReplaceTests.test_case_sensitive',
76 | 'db_functions.text.test_sha1',
77 | 'db_functions.text.test_sha224',
78 | 'db_functions.text.test_sha256',
79 | 'db_functions.text.test_sha384',
80 | 'db_functions.text.test_sha512',
81 | 'dbshell.tests.DbshellCommandTestCase.test_command_missing',
82 | 'defer_regress.tests.DeferRegressionTest.test_ticket_23270',
83 | 'delete.tests.DeletionTests.test_only_referenced_fields_selected',
84 | 'expressions.tests.BasicExpressionsTests.test_case_in_filter_if_boolean_output_field',
85 | 'expressions.tests.BasicExpressionsTests.test_filtering_on_annotate_that_uses_q',
86 | 'expressions.tests.BasicExpressionsTests.test_order_by_exists',
87 | 'expressions.tests.BasicExpressionsTests.test_subquery_in_filter',
88 | 'expressions.tests.ExpressionOperatorTests.test_lefthand_bitwise_right_shift_operator',
89 | 'expressions.tests.ExpressionOperatorTests.test_lefthand_bitwise_xor',
90 | 'expressions.tests.ExpressionOperatorTests.test_lefthand_bitwise_xor_null',
91 | 'expressions.tests.ExpressionOperatorTests.test_righthand_power',
92 | 'expressions.tests.FTimeDeltaTests.test_date_subquery_subtraction',
93 | 'expressions.tests.FTimeDeltaTests.test_datetime_subquery_subtraction',
94 | 'expressions.tests.FTimeDeltaTests.test_datetime_subtraction_microseconds',
95 | 'expressions.tests.FTimeDeltaTests.test_duration_with_datetime_microseconds',
96 | 'expressions.tests.FTimeDeltaTests.test_invalid_operator',
97 | 'expressions.tests.FTimeDeltaTests.test_time_subquery_subtraction',
98 | 'expressions.tests.IterableLookupInnerExpressionsTests.test_expressions_in_lookups_join_choice',
99 | 'expressions_case.tests.CaseExpressionTests.test_annotate_with_in_clause',
100 | 'fixtures_regress.tests.TestFixtures.test_loaddata_raises_error_when_fixture_has_invalid_foreign_key',
101 | 'fixtures_regress.tests.TestFixtures.test_loaddata_with_m2m_to_self',
102 | 'fixtures_regress.tests.TestFixtures.test_loaddata_with_valid_fixture_dirs',
103 | 'fixtures_regress.tests.TestFixtures.test_loaddata_works_when_fixture_has_forward_refs',
104 | 'fixtures_regress.tests.TestFixtures.test_path_containing_dots',
105 | 'fixtures_regress.tests.TestFixtures.test_pg_sequence_resetting_checks',
106 | 'fixtures_regress.tests.TestFixtures.test_pretty_print_xml',
107 | 'fixtures_regress.tests.TestFixtures.test_proxy_model_included',
108 | 'fixtures_regress.tests.TestFixtures.test_relative_path',
109 | 'fixtures_regress.tests.TestFixtures.test_relative_path_in_fixture_dirs',
110 | 'fixtures_regress.tests.TestFixtures.test_ticket_20820',
111 | 'fixtures_regress.tests.TestFixtures.test_ticket_22421',
112 | 'get_or_create.tests.UpdateOrCreateTransactionTests.test_creation_in_transaction',
113 | 'indexes.tests.PartialIndexTests.test_multiple_conditions',
114 | 'indexes.tests.SchemaIndexesNotPostgreSQLTests.test_create_index_ignores_opclasses',
115 | 'inspectdb.tests.InspectDBTestCase.test_introspection_errors',
116 | 'introspection.tests.IntrospectionTests.test_get_constraints',
117 | 'introspection.tests.IntrospectionTests.test_get_table_description_types',
118 | 'introspection.tests.IntrospectionTests.test_smallautofield',
119 | 'invalid_models_tests.test_ordinary_fields.TextFieldTests.test_max_length_warning',
120 | 'migrate_signals.tests.MigrateSignalTests.test_migrations_only',
121 | 'model_fields.test_integerfield.PositiveBigIntegerFieldTests',
122 | 'model_fields.test_jsonfield',
123 | 'model_indexes.tests.IndexesTests.test_db_tablespace',
124 | 'ordering.tests.OrderingTests.test_deprecated_values_annotate',
125 | 'ordering.tests.OrderingTests.test_order_by_fk_attname',
126 | 'ordering.tests.OrderingTests.test_order_by_pk',
127 | 'ordering.tests.OrderingTests.test_orders_nulls_first_on_filtered_subquery',
128 | 'prefetch_related.tests.GenericRelationTests.test_prefetch_GFK_nonint_pk',
129 | 'queries.test_bulk_update.BulkUpdateNoteTests.test_set_field_to_null',
130 | 'queries.test_bulk_update.BulkUpdateTests.test_json_field',
131 | 'queries.test_db_returning',
132 | 'queries.test_qs_combinators.QuerySetSetOperationTests.test_limits',
133 | 'queries.test_qs_combinators.QuerySetSetOperationTests.test_ordering_by_f_expression_and_alias',
134 | 'schema.tests.SchemaTests.test_add_foreign_key_quoted_db_table',
135 | 'schema.tests.SchemaTests.test_alter_auto_field_quoted_db_column',
136 | 'schema.tests.SchemaTests.test_alter_auto_field_to_char_field',
137 | 'schema.tests.SchemaTests.test_alter_auto_field_to_integer_field',
138 | 'schema.tests.SchemaTests.test_alter_autofield_pk_to_bigautofield_pk_sequence_owner',
139 | 'schema.tests.SchemaTests.test_alter_autofield_pk_to_smallautofield_pk_sequence_owner',
140 | 'schema.tests.SchemaTests.test_alter_implicit_id_to_explicit',
141 | 'schema.tests.SchemaTests.test_alter_int_pk_to_autofield_pk',
142 | 'schema.tests.SchemaTests.test_alter_int_pk_to_bigautofield_pk',
143 | 'schema.tests.SchemaTests.test_alter_pk_with_self_referential_field',
144 | 'schema.tests.SchemaTests.test_alter_primary_key_quoted_db_table',
145 | 'schema.tests.SchemaTests.test_alter_smallint_pk_to_smallautofield_pk',
146 | 'schema.tests.SchemaTests.test_char_field_pk_to_auto_field',
147 | 'schema.tests.SchemaTests.test_inline_fk',
148 | 'schema.tests.SchemaTests.test_no_db_constraint_added_during_primary_key_change',
149 | 'schema.tests.SchemaTests.test_remove_field_check_does_not_remove_meta_constraints',
150 | 'schema.tests.SchemaTests.test_remove_field_unique_does_not_remove_meta_constraints',
151 | 'schema.tests.SchemaTests.test_remove_unique_together_does_not_remove_meta_constraints',
152 | 'schema.tests.SchemaTests.test_text_field_with_db_index',
153 | 'schema.tests.SchemaTests.test_unique_and_reverse_m2m',
154 | 'schema.tests.SchemaTests.test_unique_no_unnecessary_fk_drops',
155 | 'schema.tests.SchemaTests.test_unique_together_with_fk',
156 | 'schema.tests.SchemaTests.test_unique_together_with_fk_with_existing_index',
157 | 'select_for_update.tests.SelectForUpdateTests.test_for_update_after_from',
158 | )
159 |
160 | SECRET_KEY = "django_tests_secret_key"
161 |
162 | # Use a fast hasher to speed up tests.
163 | PASSWORD_HASHERS = [
164 | 'django.contrib.auth.hashers.MD5PasswordHasher',
165 | ]
166 |
--------------------------------------------------------------------------------
/sql_server/pyodbc/introspection.py:
--------------------------------------------------------------------------------
1 | import pyodbc as Database
2 | from collections import namedtuple
3 |
4 | from django.db.backends.base.introspection import (
5 | BaseDatabaseIntrospection, TableInfo,
6 | )
7 | from django.db.models.indexes import Index
8 |
9 | SQL_AUTOFIELD = -777555
10 | SQL_BIGAUTOFIELD = -777444
11 |
12 | FieldInfo = namedtuple('FieldInfo', 'name type_code display_size internal_size precision scale null_ok default')
13 |
14 |
15 | class DatabaseIntrospection(BaseDatabaseIntrospection):
16 | # Map type codes to Django Field types.
17 | data_types_reverse = {
18 | SQL_AUTOFIELD: 'AutoField',
19 | SQL_BIGAUTOFIELD: 'BigAutoField',
20 | Database.SQL_BIGINT: 'BigIntegerField',
21 | # Database.SQL_BINARY: ,
22 | Database.SQL_BIT: 'BooleanField',
23 | Database.SQL_CHAR: 'CharField',
24 | Database.SQL_DECIMAL: 'DecimalField',
25 | Database.SQL_DOUBLE: 'FloatField',
26 | Database.SQL_FLOAT: 'FloatField',
27 | Database.SQL_GUID: 'TextField',
28 | Database.SQL_INTEGER: 'IntegerField',
29 | Database.SQL_LONGVARBINARY: 'BinaryField',
30 | # Database.SQL_LONGVARCHAR: ,
31 | Database.SQL_NUMERIC: 'DecimalField',
32 | Database.SQL_REAL: 'FloatField',
33 | Database.SQL_SMALLINT: 'SmallIntegerField',
34 | Database.SQL_SS_TIME2: 'TimeField',
35 | Database.SQL_TINYINT: 'SmallIntegerField',
36 | Database.SQL_TYPE_DATE: 'DateField',
37 | Database.SQL_TYPE_TIME: 'TimeField',
38 | Database.SQL_TYPE_TIMESTAMP: 'DateTimeField',
39 | Database.SQL_VARBINARY: 'BinaryField',
40 | Database.SQL_VARCHAR: 'TextField',
41 | Database.SQL_WCHAR: 'CharField',
42 | Database.SQL_WLONGVARCHAR: 'TextField',
43 | Database.SQL_WVARCHAR: 'TextField',
44 | }
45 |
46 | ignored_tables = []
47 |
48 | def get_field_type(self, data_type, description):
49 | field_type = super().get_field_type(data_type, description)
50 | # the max nvarchar length is described as 0 or 2**30-1
51 | # (it depends on the driver)
52 | size = description.internal_size
53 | if field_type == 'CharField':
54 | if size == 0 or size >= 2**30 - 1:
55 | field_type = "TextField"
56 | elif field_type == 'TextField':
57 | if size > 0 and size < 2**30 - 1:
58 | field_type = 'CharField'
59 | return field_type
60 |
61 | def get_table_list(self, cursor):
62 | """
63 | Returns a list of table and view names in the current database.
64 | """
65 | sql = 'SELECT TABLE_NAME, TABLE_TYPE FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = SCHEMA_NAME()'
66 | cursor.execute(sql)
67 | types = {'BASE TABLE': 't', 'VIEW': 'v'}
68 | return [TableInfo(row[0], types.get(row[1]))
69 | for row in cursor.fetchall()
70 | if row[0] not in self.ignored_tables]
71 |
72 | def _is_auto_field(self, cursor, table_name, column_name):
73 | """
 74 |         Checks whether the column has the IDENTITY property
75 | """
76 | # COLUMNPROPERTY: http://msdn2.microsoft.com/en-us/library/ms174968.aspx
77 |
78 | # from django.db import connection
79 | # cursor.execute("SELECT COLUMNPROPERTY(OBJECT_ID(%s), %s, 'IsIdentity')",
80 | # (connection.ops.quote_name(table_name), column_name))
81 | cursor.execute("SELECT COLUMNPROPERTY(OBJECT_ID(%s), %s, 'IsIdentity')",
82 | (self.connection.ops.quote_name(table_name), column_name))
83 | return cursor.fetchall()[0][0]
84 |
85 | def get_table_description(self, cursor, table_name, identity_check=True):
86 | """Returns a description of the table, with DB-API cursor.description interface.
87 |
 88 |         The 'identity_check' parameter has been added to the function argspec.
89 | If set to True, the function will check each of the table's fields for the
90 | IDENTITY property (the IDENTITY property is the MSSQL equivalent to an AutoField).
91 |
92 | When an integer field is found with an IDENTITY property, it is given a custom field number
93 | of SQL_AUTOFIELD, which maps to the 'AutoField' value in the DATA_TYPES_REVERSE dict.
94 |
95 | When a bigint field is found with an IDENTITY property, it is given a custom field number
96 | of SQL_BIGAUTOFIELD, which maps to the 'BigAutoField' value in the DATA_TYPES_REVERSE dict.
97 | """
98 |
99 | # map pyodbc's cursor.columns to db-api cursor description
100 | columns = [[c[3], c[4], None, c[6], c[6], c[8], c[10], c[12]] for c in cursor.columns(table=table_name)]
101 | items = []
102 | for column in columns:
103 | if identity_check and self._is_auto_field(cursor, table_name, column[0]):
104 | if column[1] == Database.SQL_BIGINT:
105 | column[1] = SQL_BIGAUTOFIELD
106 | else:
107 | column[1] = SQL_AUTOFIELD
108 | if column[1] == Database.SQL_WVARCHAR and column[3] < 4000:
109 | column[1] = Database.SQL_WCHAR
110 | items.append(FieldInfo(*column))
111 | return items
112 |
113 | def get_sequences(self, cursor, table_name, table_fields=()):
114 | cursor.execute("""
115 | SELECT c.name FROM sys.columns c
116 | INNER JOIN sys.tables t ON c.object_id = t.object_id
117 | WHERE t.schema_id = SCHEMA_ID() AND t.name = %s AND c.is_identity = 1""",
118 | [table_name])
119 | # SQL Server allows only one identity column per table
120 | # https://docs.microsoft.com/en-us/sql/t-sql/statements/create-table-transact-sql-identity-property
121 | row = cursor.fetchone()
122 | return [{'table': table_name, 'column': row[0]}] if row else []
123 |
124 | def get_relations(self, cursor, table_name):
125 | """
126 | Returns a dictionary of {field_name: (field_name_other_table, other_table)}
127 | representing all relationships to the given table.
128 | """
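        # Example return shape (hypothetical table/column names):
        #   {'author_id': ('id', 'testapp_author')}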
129 | # CONSTRAINT_COLUMN_USAGE: http://msdn2.microsoft.com/en-us/library/ms174431.aspx
130 | # CONSTRAINT_TABLE_USAGE: http://msdn2.microsoft.com/en-us/library/ms179883.aspx
131 | # REFERENTIAL_CONSTRAINTS: http://msdn2.microsoft.com/en-us/library/ms179987.aspx
132 | # TABLE_CONSTRAINTS: http://msdn2.microsoft.com/en-us/library/ms181757.aspx
133 |
134 | sql = """
135 | SELECT e.COLUMN_NAME AS column_name,
136 | c.TABLE_NAME AS referenced_table_name,
137 | d.COLUMN_NAME AS referenced_column_name
138 | FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS a
139 | INNER JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS b
140 | ON a.CONSTRAINT_NAME = b.CONSTRAINT_NAME AND a.TABLE_SCHEMA = b.CONSTRAINT_SCHEMA
141 | INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_TABLE_USAGE AS c
142 | ON b.UNIQUE_CONSTRAINT_NAME = c.CONSTRAINT_NAME AND b.CONSTRAINT_SCHEMA = c.CONSTRAINT_SCHEMA
143 | INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE AS d
144 | ON c.CONSTRAINT_NAME = d.CONSTRAINT_NAME AND c.CONSTRAINT_SCHEMA = d.CONSTRAINT_SCHEMA
145 | INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE AS e
146 | ON a.CONSTRAINT_NAME = e.CONSTRAINT_NAME AND a.TABLE_SCHEMA = e.TABLE_SCHEMA
147 | WHERE a.TABLE_SCHEMA = SCHEMA_NAME() AND a.TABLE_NAME = %s AND a.CONSTRAINT_TYPE = 'FOREIGN KEY'"""
148 | cursor.execute(sql, (table_name,))
149 | return dict([[item[0], (item[2], item[1])] for item in cursor.fetchall()])
150 |
151 | def get_key_columns(self, cursor, table_name):
152 | """
153 | Returns a list of (column_name, referenced_table_name, referenced_column_name) for all
154 | key columns in given table.
155 | """
156 | key_columns = []
157 | cursor.execute("""
158 | SELECT c.name AS column_name, rt.name AS referenced_table_name, rc.name AS referenced_column_name
159 | FROM sys.foreign_key_columns fk
160 | INNER JOIN sys.tables t ON t.object_id = fk.parent_object_id
161 | INNER JOIN sys.columns c ON c.object_id = t.object_id AND c.column_id = fk.parent_column_id
162 | INNER JOIN sys.tables rt ON rt.object_id = fk.referenced_object_id
163 | INNER JOIN sys.columns rc ON rc.object_id = rt.object_id AND rc.column_id = fk.referenced_column_id
164 | WHERE t.schema_id = SCHEMA_ID() AND t.name = %s""", [table_name])
165 | key_columns.extend([tuple(row) for row in cursor.fetchall()])
166 | return key_columns
167 |
168 | def get_constraints(self, cursor, table_name):
169 | """
170 | Retrieves any constraints or keys (unique, pk, fk, check, index)
171 | across one or more columns.
172 |
173 | Returns a dict mapping constraint names to their attributes,
174 | where attributes is a dict with keys:
175 | * columns: List of columns this covers
176 | * primary_key: True if primary key, False otherwise
177 | * unique: True if this is a unique constraint, False otherwise
178 | * foreign_key: (table, column) of target, or None
179 | * check: True if check constraint, False otherwise
180 | * index: True if index, False otherwise.
181 | * orders: The order (ASC/DESC) defined for the columns of indexes
182 | * type: The type of the index (btree, hash, etc.)
183 | """
184 | constraints = {}
185 | # Loop over the key table, collecting things as constraints
186 | # This will get PKs, FKs, and uniques, but not CHECK
187 | cursor.execute("""
188 | SELECT
189 | kc.constraint_name,
190 | kc.column_name,
191 | tc.constraint_type,
192 | fk.referenced_table_name,
193 | fk.referenced_column_name
194 | FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS kc
195 | INNER JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS tc ON
196 | kc.table_schema = tc.table_schema AND
197 | kc.table_name = tc.table_name AND
198 | kc.constraint_name = tc.constraint_name
199 | LEFT OUTER JOIN (
200 | SELECT
201 | ps.name AS table_schema,
202 | pt.name AS table_name,
203 | pc.name AS column_name,
204 | rt.name AS referenced_table_name,
205 | rc.name AS referenced_column_name
206 | FROM
207 | sys.foreign_key_columns fkc
208 | INNER JOIN sys.tables pt ON
209 | fkc.parent_object_id = pt.object_id
210 | INNER JOIN sys.schemas ps ON
211 | pt.schema_id = ps.schema_id
212 | INNER JOIN sys.columns pc ON
213 | fkc.parent_object_id = pc.object_id AND
214 | fkc.parent_column_id = pc.column_id
215 | INNER JOIN sys.tables rt ON
216 | fkc.referenced_object_id = rt.object_id
217 | INNER JOIN sys.schemas rs ON
218 | rt.schema_id = rs.schema_id
219 | INNER JOIN sys.columns rc ON
220 | fkc.referenced_object_id = rc.object_id AND
221 | fkc.referenced_column_id = rc.column_id
222 | ) fk ON
223 | kc.table_schema = fk.table_schema AND
224 | kc.table_name = fk.table_name AND
225 | kc.column_name = fk.column_name
226 | WHERE
227 | kc.table_schema = SCHEMA_NAME() AND
228 | kc.table_name = %s
229 | ORDER BY
230 | kc.constraint_name ASC,
231 | kc.ordinal_position ASC
232 | """, [table_name])
233 | for constraint, column, kind, ref_table, ref_column in cursor.fetchall():
234 | # If we're the first column, make the record
235 | if constraint not in constraints:
236 | constraints[constraint] = {
237 | "columns": [],
238 | "primary_key": kind.lower() == "primary key",
239 | "unique": kind.lower() in ["primary key", "unique"],
240 | "foreign_key": (ref_table, ref_column) if kind.lower() == "foreign key" else None,
241 | "check": False,
242 | "index": False,
243 | }
244 | # Record the details
245 | constraints[constraint]['columns'].append(column)
246 | # Now get CHECK constraint columns
247 | cursor.execute("""
248 | SELECT kc.constraint_name, kc.column_name
249 | FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE AS kc
250 | JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS c ON
251 | kc.table_schema = c.table_schema AND
252 | kc.table_name = c.table_name AND
253 | kc.constraint_name = c.constraint_name
254 | WHERE
255 | c.constraint_type = 'CHECK' AND
256 | kc.table_schema = SCHEMA_NAME() AND
257 | kc.table_name = %s
258 | """, [table_name])
259 | for constraint, column in cursor.fetchall():
260 | # If we're the first column, make the record
261 | if constraint not in constraints:
262 | constraints[constraint] = {
263 | "columns": [],
264 | "primary_key": False,
265 | "unique": False,
266 | "foreign_key": None,
267 | "check": True,
268 | "index": False,
269 | }
270 | # Record the details
271 | constraints[constraint]['columns'].append(column)
272 | # Now get indexes
273 | cursor.execute("""
274 | SELECT
275 | i.name AS index_name,
276 | i.is_unique,
277 | i.is_primary_key,
278 | i.type,
279 | i.type_desc,
280 | ic.is_descending_key,
281 | c.name AS column_name
282 | FROM
283 | sys.tables AS t
284 | INNER JOIN sys.schemas AS s ON
285 | t.schema_id = s.schema_id
286 | INNER JOIN sys.indexes AS i ON
287 | t.object_id = i.object_id
288 | INNER JOIN sys.index_columns AS ic ON
289 | i.object_id = ic.object_id AND
290 | i.index_id = ic.index_id
291 | INNER JOIN sys.columns AS c ON
292 | ic.object_id = c.object_id AND
293 | ic.column_id = c.column_id
294 | WHERE
295 | t.schema_id = SCHEMA_ID() AND
296 | t.name = %s
297 | ORDER BY
298 | i.index_id ASC,
299 | ic.index_column_id ASC
300 | """, [table_name])
301 | indexes = {}
302 | for index, unique, primary, type_, desc, order, column in cursor.fetchall():
303 | if index not in indexes:
304 | indexes[index] = {
305 | "columns": [],
306 | "primary_key": primary,
307 | "unique": unique,
308 | "foreign_key": None,
309 | "check": False,
310 | "index": True,
311 | "orders": [],
312 | "type": Index.suffix if type_ in (1, 2) else desc.lower(),
313 | }
314 | indexes[index]["columns"].append(column)
315 | indexes[index]["orders"].append("DESC" if order == 1 else "ASC")
316 | for index, constraint in indexes.items():
317 | if index not in constraints:
318 | constraints[index] = constraint
319 | return constraints
320 |
--------------------------------------------------------------------------------
/sql_server/pyodbc/operations.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import uuid
3 | import warnings
4 | import django
5 |
6 | from django.conf import settings
7 | from django.db.backends.base.operations import BaseDatabaseOperations
8 | from django.db.models import Exists, ExpressionWrapper
9 | from django.db.models.expressions import RawSQL
10 | from django.db.models.sql.where import WhereNode
11 | from django.utils import timezone
12 | from django.utils.encoding import force_str
13 |
14 | import pytz
15 |
16 |
17 | class DatabaseOperations(BaseDatabaseOperations):
18 | compiler_module = 'sql_server.pyodbc.compiler'
19 |
20 | cast_char_field_without_max_length = 'nvarchar(max)'
21 |
22 | def _convert_field_to_tz(self, field_name, tzname):
23 | if settings.USE_TZ and not tzname == 'UTC':
24 | offset = self._get_utcoffset(tzname)
25 | field_name = 'DATEADD(second, %d, %s)' % (offset, field_name)
26 | return field_name
27 |
28 | def _get_utcoffset(self, tzname):
29 | """
30 | Returns UTC offset for given time zone in seconds
31 | """
 33 |         # SQL Server has no built-in support for the tz database, see:
33 | # http://blogs.msdn.com/b/sqlprogrammability/archive/2008/03/18/using-time-zone-data-in-sql-server-2008.aspx
34 | zone = pytz.timezone(tzname)
35 | # no way to take DST into account at this point
36 | now = datetime.datetime.now()
37 | delta = zone.localize(now, is_dst=False).utcoffset()
38 | return delta.days * 86400 + delta.seconds
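        # Worked example (illustrative time zone): 'Asia/Tokyo' is UTC+9 with
        # no DST, so this returns 9 * 3600 == 32400 and _convert_field_to_tz()
        # then emits DATEADD(second, 32400, <field>).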
39 |
40 | def bulk_batch_size(self, fields, objs):
41 | """
42 | Returns the maximum allowed batch size for the backend. The fields
43 | are the fields going to be inserted in the batch, the objs contains
44 | all the objects to be inserted.
45 | """
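        # Worked example of the 1000 row-value limit applied below: 3000
        # objects with 4 fields each gives 3000 * 4 > 1000, so the batch
        # size becomes 1000 // 4 == 250 rows per INSERT statement.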
46 | objs_len, fields_len, max_row_values = len(objs), len(fields), 1000
47 | if (objs_len * fields_len) <= max_row_values:
48 | size = objs_len
49 | else:
50 | size = max_row_values // fields_len
51 | return size
52 |
53 | def bulk_insert_sql(self, fields, placeholder_rows):
54 | placeholder_rows_sql = (", ".join(row) for row in placeholder_rows)
55 | values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql)
56 | return "VALUES " + values_sql
57 |
58 | def cache_key_culling_sql(self):
59 | """
60 | Returns a SQL query that retrieves the first cache key greater than the
61 | smallest.
62 |
63 | This is used by the 'db' cache backend to determine where to start
64 | culling.
65 | """
66 | return "SELECT cache_key FROM (SELECT cache_key, " \
67 | "ROW_NUMBER() OVER (ORDER BY cache_key) AS rn FROM %s" \
68 | ") cache WHERE rn = %%s + 1"
69 |
70 | def combine_duration_expression(self, connector, sub_expressions):
71 | lhs, rhs = sub_expressions
72 | sign = ' * -1' if connector == '-' else ''
73 | if lhs.startswith('DATEADD'):
74 | col, sql = rhs, lhs
75 | else:
76 | col, sql = lhs, rhs
77 | params = [sign for _ in range(sql.count('DATEADD'))]
78 | params.append(col)
79 | return sql % tuple(params)
80 |
81 | def combine_expression(self, connector, sub_expressions):
82 | """
83 | SQL Server requires special cases for some operators in query expressions
84 | """
85 | if connector == '^':
86 | return 'POWER(%s)' % ','.join(sub_expressions)
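        # Note: for the bit shifts below, 2 * n equals 2 ** n only when n is
        # 1 or 2, so these emulations are exact only for small shift counts.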
87 | elif connector == '<<':
88 | return '%s * (2 * %s)' % tuple(sub_expressions)
89 | elif connector == '>>':
90 | return '%s / (2 * %s)' % tuple(sub_expressions)
91 | return super().combine_expression(connector, sub_expressions)
92 |
93 | def convert_datetimefield_value(self, value, expression, connection):
94 | if value is not None:
95 | if settings.USE_TZ:
96 | value = timezone.make_aware(value, self.connection.timezone)
97 | return value
98 |
99 | def convert_floatfield_value(self, value, expression, connection):
100 | if value is not None:
101 | value = float(value)
102 | return value
103 |
104 | def convert_uuidfield_value(self, value, expression, connection):
105 | if value is not None:
106 | value = uuid.UUID(value)
107 | return value
108 |
109 | def convert_booleanfield_value(self, value, expression, connection):
110 | return bool(value) if value in (0, 1) else value
111 |
112 | def date_extract_sql(self, lookup_type, field_name):
113 | if lookup_type == 'week_day':
114 | return "DATEPART(weekday, %s)" % field_name
115 | elif lookup_type == 'week':
116 | return "DATEPART(iso_week, %s)" % field_name
117 | elif lookup_type == 'iso_year':
118 | return "YEAR(DATEADD(day, 26 - DATEPART(isoww, %s), %s))" % (field_name, field_name)
119 | else:
120 | return "DATEPART(%s, %s)" % (lookup_type, field_name)
121 |
122 | def date_interval_sql(self, timedelta):
123 | """
124 | implements the interval functionality for expressions
125 | """
126 | sec = timedelta.seconds + timedelta.days * 86400
127 | sql = 'DATEADD(second, %d%%s, CAST(%%s AS datetime2))' % sec
128 | if timedelta.microseconds:
129 | sql = 'DATEADD(microsecond, %d%%s, CAST(%s AS datetime2))' % (timedelta.microseconds, sql)
130 | return sql
131 |
132 | def date_trunc_sql(self, lookup_type, field_name):
133 | CONVERT_YEAR = 'CONVERT(varchar, DATEPART(year, %s))' % field_name
134 | CONVERT_QUARTER = 'CONVERT(varchar, 1+((DATEPART(quarter, %s)-1)*3))' % field_name
135 | CONVERT_MONTH = 'CONVERT(varchar, DATEPART(month, %s))' % field_name
136 |
137 | if lookup_type == 'year':
138 | return "CONVERT(datetime2, %s + '/01/01')" % CONVERT_YEAR
139 | if lookup_type == 'quarter':
140 | return "CONVERT(datetime2, %s + '/' + %s + '/01')" % (CONVERT_YEAR, CONVERT_QUARTER)
141 | if lookup_type == 'month':
142 | return "CONVERT(datetime2, %s + '/' + %s + '/01')" % (CONVERT_YEAR, CONVERT_MONTH)
143 | if lookup_type == 'week':
144 | CONVERT = "CONVERT(datetime2, CONVERT(varchar(12), %s, 112))" % field_name
145 | return "DATEADD(DAY, (DATEPART(weekday, %s) + 5) %%%% 7 * -1, %s)" % (CONVERT, field_name)
146 | if lookup_type == 'day':
147 | return "CONVERT(datetime2, CONVERT(varchar(12), %s, 112))" % field_name
148 |
149 | def datetime_cast_date_sql(self, field_name, tzname):
150 | field_name = self._convert_field_to_tz(field_name, tzname)
151 | sql = 'CAST(%s AS date)' % field_name
152 | return sql
153 |
154 | def datetime_cast_time_sql(self, field_name, tzname):
155 | field_name = self._convert_field_to_tz(field_name, tzname)
156 | sql = 'CAST(%s AS time)' % field_name
157 | return sql
158 |
159 | def datetime_extract_sql(self, lookup_type, field_name, tzname):
160 | field_name = self._convert_field_to_tz(field_name, tzname)
161 | return self.date_extract_sql(lookup_type, field_name)
162 |
163 | def datetime_trunc_sql(self, lookup_type, field_name, tzname):
164 | field_name = self._convert_field_to_tz(field_name, tzname)
165 | sql = ''
166 | if lookup_type in ('year', 'quarter', 'month', 'week', 'day'):
167 | sql = self.date_trunc_sql(lookup_type, field_name)
168 | elif lookup_type == 'hour':
169 | sql = "CONVERT(datetime2, SUBSTRING(CONVERT(varchar, %s, 20), 0, 14) + ':00:00')" % field_name
170 | elif lookup_type == 'minute':
171 | sql = "CONVERT(datetime2, SUBSTRING(CONVERT(varchar, %s, 20), 0, 17) + ':00')" % field_name
172 | elif lookup_type == 'second':
173 | sql = "CONVERT(datetime2, CONVERT(varchar, %s, 20))" % field_name
174 | return sql
175 |
176 | def for_update_sql(self, nowait=False, skip_locked=False, of=()):
177 | if skip_locked:
178 | return 'WITH (ROWLOCK, UPDLOCK, READPAST)'
179 | elif nowait:
180 | return 'WITH (NOWAIT, ROWLOCK, UPDLOCK)'
181 | else:
182 | return 'WITH (ROWLOCK, UPDLOCK)'
183 |
184 | def format_for_duration_arithmetic(self, sql):
185 | if sql == '%s':
186 | # use DATEADD only once because Django prepares only one parameter for this
187 | fmt = 'DATEADD(second, %s / 1000000%%s, CAST(%%s AS datetime2))'
188 | sql = '%%s'
189 | else:
190 | # use DATEADD twice to avoid arithmetic overflow for number part
191 | MICROSECOND = "DATEADD(microsecond, %s %%%%%%%% 1000000%%s, CAST(%%s AS datetime2))"
192 | fmt = 'DATEADD(second, %s / 1000000%%s, {})'.format(MICROSECOND)
193 | sql = (sql, sql)
194 | return fmt % sql
195 |
196 | def fulltext_search_sql(self, field_name):
197 | """
198 | Returns the SQL WHERE clause to use in order to perform a full-text
199 | search of the given field_name. Note that the resulting string should
200 | contain a '%s' placeholder for the value being searched against.
201 | """
202 | return 'CONTAINS(%s, %%s)' % field_name
203 |
204 | def get_db_converters(self, expression):
205 | converters = super().get_db_converters(expression)
206 | internal_type = expression.output_field.get_internal_type()
207 | if internal_type == 'DateTimeField':
208 | converters.append(self.convert_datetimefield_value)
209 | elif internal_type == 'FloatField':
210 | converters.append(self.convert_floatfield_value)
211 | elif internal_type == 'UUIDField':
212 | converters.append(self.convert_uuidfield_value)
213 | elif internal_type in ('BooleanField', 'NullBooleanField'):
214 | converters.append(self.convert_booleanfield_value)
215 | return converters
216 |
217 | def last_insert_id(self, cursor, table_name, pk_name):
218 | """
219 | Given a cursor object that has just performed an INSERT statement into
220 | a table that has an auto-incrementing ID, returns the newly created ID.
221 |
222 | This method also receives the table name and the name of the primary-key
223 | column.
224 | """
225 | # TODO: Check how the `last_insert_id` is being used in the upper layers
226 | # in context of multithreaded access, compare with other backends
227 |
228 | # IDENT_CURRENT: http://msdn2.microsoft.com/en-us/library/ms175098.aspx
229 | # SCOPE_IDENTITY: http://msdn2.microsoft.com/en-us/library/ms190315.aspx
230 | # @@IDENTITY: http://msdn2.microsoft.com/en-us/library/ms187342.aspx
231 |
232 | # IDENT_CURRENT is not limited by scope and session; it is limited to
233 | # a specified table. IDENT_CURRENT returns the value generated for
234 | # a specific table in any session and any scope.
235 | # SCOPE_IDENTITY and @@IDENTITY return the last identity values that
236 | # are generated in any table in the current session. However,
237 | # SCOPE_IDENTITY returns values inserted only within the current scope;
238 | # @@IDENTITY is not limited to a specific scope.
239 |
240 | table_name = self.quote_name(table_name)
241 | cursor.execute("SELECT CAST(IDENT_CURRENT(%s) AS int)", [table_name])
242 | return cursor.fetchone()[0]
243 |
244 | def lookup_cast(self, lookup_type, internal_type=None):
245 | if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'):
246 | return "UPPER(%s)"
247 | return "%s"
248 |
249 | def max_name_length(self):
250 | return 128
251 |
252 | def no_limit_value(self):
253 | return None
254 |
255 | def prepare_sql_script(self, sql, _allow_fallback=False):
256 | return [sql]
257 |
258 | def quote_name(self, name):
259 | """
260 | Returns a quoted version of the given table, index or column name. Does
261 | not quote the given name if it's already been quoted.
262 | """
263 | if name.startswith('[') and name.endswith(']'):
264 | return name # Quoting once is enough.
265 | return '[%s]' % name
266 |
267 | def random_function_sql(self):
268 | """
269 | Returns a SQL expression that returns a random value.
270 | """
271 | return "RAND()"
272 |
273 | def regex_lookup(self, lookup_type):
274 | """
275 | Returns the string to use in a query when performing regular expression
276 | lookups (using "regex" or "iregex"). The resulting string should
277 | contain a '%s' placeholder for the column being searched against.
278 |
279 | If the feature is not supported (or part of it is not supported), a
280 | NotImplementedError exception can be raised.
281 | """
282 | match_option = {'iregex': 0, 'regex': 1}[lookup_type]
283 | return "dbo.REGEXP_LIKE(%%s, %%s, %s)=1" % (match_option,)
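        # e.g. lookup_type='iregex' renders: dbo.REGEXP_LIKE(%s, %s, 0)=1
        # (REGEXP_LIKE is provided by the bundled regex_clr assembly, which
        # the install_regex_clr management command deploys to the database).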
284 |
285 | def limit_offset_sql(self, low_mark, high_mark):
286 | """Return LIMIT/OFFSET SQL clause."""
287 | limit, offset = self._get_limit_offset_params(low_mark, high_mark)
288 | return '%s%s' % (
289 | (' OFFSET %d ROWS' % offset) if offset else '',
290 | (' FETCH FIRST %d ROWS ONLY' % limit) if limit else '',
291 | )
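        # e.g. low_mark=10, high_mark=20 renders
        # ' OFFSET 10 ROWS FETCH FIRST 10 ROWS ONLY'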
292 |
293 | def last_executed_query(self, cursor, sql, params):
294 | """
295 | Returns a string of the query last executed by the given cursor, with
296 | placeholders replaced with actual values.
297 |
298 | `sql` is the raw query containing placeholders, and `params` is the
299 | sequence of parameters. These are used by default, but this method
300 | exists for database backends to provide a better implementation
301 | according to their own quoting schemes.
302 | """
303 | return super().last_executed_query(cursor, cursor.last_sql, cursor.last_params)
304 |
305 | def savepoint_create_sql(self, sid):
306 | """
307 | Returns the SQL for starting a new savepoint. Only required if the
308 | "uses_savepoints" feature is True. The "sid" parameter is a string
309 | for the savepoint id.
310 | """
311 | return "SAVE TRANSACTION %s" % sid
312 |
313 | def savepoint_rollback_sql(self, sid):
314 | """
315 | Returns the SQL for rolling back the given savepoint.
316 | """
317 | return "ROLLBACK TRANSACTION %s" % sid
318 |
319 | def _build_sequences(self, sequences, cursor):
320 | seqs = []
321 | for seq in sequences:
322 | cursor.execute("SELECT COUNT(*) FROM %s" % self.quote_name(seq["table"]))
323 | rowcnt = cursor.fetchone()[0]
324 | elem = {}
325 | if rowcnt:
326 | elem['start_id'] = 0
327 | else:
328 | elem['start_id'] = 1
329 | elem.update(seq)
330 | seqs.append(elem)
331 | return seqs
332 |
333 | def _sql_flush_new(self, style, tables, *, reset_sequences=False, allow_cascade=False):
334 | if reset_sequences:
335 | return [
336 | sequence
337 | for sequence in self.connection.introspection.sequence_list()
338 | ]
339 |
340 | return []
341 |
342 | def _sql_flush_old(self, style, tables, sequences, allow_cascade=False):
343 | return sequences
344 |
345 | def sql_flush(self, style, tables, *args, **kwargs):
346 | """
347 | Returns a list of SQL statements required to remove all data from
348 | the given database tables (without actually removing the tables
349 | themselves).
350 |
351 | The returned value also includes SQL statements required to reset DB
352 |         sequences passed in the `sequences` parameter.
353 |
354 | The `style` argument is a Style object as returned by either
355 | color_style() or no_style() in django.core.management.color.
356 |
357 | The `allow_cascade` argument determines whether truncation may cascade
358 |         to tables with foreign keys pointing to the tables being truncated.
359 | """
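        # The returned statements take roughly this shape (names illustrative):
        #   ALTER TABLE [app_tbl] NOCHECK CONSTRAINT [fk_name];
        #   DELETE FROM [app_tbl];
        #   DBCC CHECKIDENT ([app_tbl], RESEED, 0) WITH NO_INFOMSGS;
        #   ALTER TABLE [app_tbl] CHECK CONSTRAINT [fk_name];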
360 |
361 | if not tables:
362 | return []
363 |
364 | if django.VERSION >= (3, 1):
365 | sequences = self._sql_flush_new(style, tables, *args, **kwargs)
366 | else:
367 | sequences = self._sql_flush_old(style, tables, *args, **kwargs)
368 |
369 | from django.db import connections
370 | cursor = connections[self.connection.alias].cursor()
371 |
372 | seqs = self._build_sequences(sequences, cursor)
373 |
374 | COLUMNS = "TABLE_NAME, CONSTRAINT_NAME"
375 | WHERE = "CONSTRAINT_TYPE not in ('PRIMARY KEY','UNIQUE')"
376 | cursor.execute(
377 | "SELECT {} FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS WHERE {}".format(COLUMNS, WHERE))
378 | fks = cursor.fetchall()
379 | sql_list = ['ALTER TABLE %s NOCHECK CONSTRAINT %s;' %
380 | (self.quote_name(fk[0]), self.quote_name(fk[1])) for fk in fks]
381 | sql_list.extend(['%s %s %s;' % (style.SQL_KEYWORD('DELETE'), style.SQL_KEYWORD('FROM'),
382 | style.SQL_FIELD(self.quote_name(table))) for table in tables])
383 |
384 | if self.connection.to_azure_sql_db and self.connection.sql_server_version < 2014:
385 | warnings.warn("Resetting identity columns is not supported "
386 | "on this versios of Azure SQL Database.",
387 | RuntimeWarning)
388 | else:
389 | # Then reset the counters on each table.
390 | sql_list.extend(['%s %s (%s, %s, %s) %s %s;' % (
391 | style.SQL_KEYWORD('DBCC'),
392 | style.SQL_KEYWORD('CHECKIDENT'),
393 | style.SQL_FIELD(self.quote_name(seq["table"])),
394 | style.SQL_KEYWORD('RESEED'),
395 | style.SQL_FIELD('%d' % seq['start_id']),
396 | style.SQL_KEYWORD('WITH'),
397 | style.SQL_KEYWORD('NO_INFOMSGS'),
398 | ) for seq in seqs])
399 |
400 | sql_list.extend(['ALTER TABLE %s CHECK CONSTRAINT %s;' %
401 | (self.quote_name(fk[0]), self.quote_name(fk[1])) for fk in fks])
402 | return sql_list
403 |
404 | def start_transaction_sql(self):
405 | """
406 | Returns the SQL statement required to start a transaction.
407 | """
408 | return "BEGIN TRANSACTION"
409 |
410 | def subtract_temporals(self, internal_type, lhs, rhs):
411 | lhs_sql, lhs_params = lhs
412 | rhs_sql, rhs_params = rhs
413 | if internal_type == 'DateField':
414 | sql = "CAST(DATEDIFF(day, %(rhs)s, %(lhs)s) AS bigint) * 86400 * 1000000"
415 | params = rhs_params + lhs_params
416 | else:
417 | SECOND = "DATEDIFF(second, %(rhs)s, %(lhs)s)"
418 | MICROSECOND = "DATEPART(microsecond, %(lhs)s) - DATEPART(microsecond, %(rhs)s)"
419 | sql = "CAST({} AS bigint) * 1000000 + {}".format(SECOND, MICROSECOND)
420 | params = rhs_params + lhs_params * 2 + rhs_params
421 | return sql % {'lhs': lhs_sql, 'rhs': rhs_sql}, params
422 |
423 | def tablespace_sql(self, tablespace, inline=False):
424 | """
425 | Returns the SQL that will be appended to tables or rows to define
426 | a tablespace. Returns '' if the backend doesn't use tablespaces.
427 | """
428 | return "ON %s" % self.quote_name(tablespace)
429 |
430 | def prep_for_like_query(self, x):
431 | """Prepares a value for use in a LIKE query."""
432 | # http://msdn2.microsoft.com/en-us/library/ms179859.aspx
433 | return force_str(x).replace('\\', '\\\\').replace('[', '[[]').replace('%', '[%]').replace('_', '[_]')
434 |
435 | def prep_for_iexact_query(self, x):
436 | """
437 | Same as prep_for_like_query(), but called for "iexact" matches, which
438 | need not necessarily be implemented using "LIKE" in the backend.
439 | """
440 | return x
441 |
442 | def adapt_datetimefield_value(self, value):
443 | """
444 | Transforms a datetime value to an object compatible with what is expected
445 | by the backend driver for datetime columns.
446 | """
447 | if value is None:
448 | return None
449 | if settings.USE_TZ and timezone.is_aware(value):
450 |             # pyodbc doesn't support datetimeoffset
451 | value = value.astimezone(self.connection.timezone).replace(tzinfo=None)
452 | return value
453 |
454 | def time_trunc_sql(self, lookup_type, field_name):
455 | # if self.connection.sql_server_version >= 2012:
456 | # fields = {
457 | # 'hour': 'DATEPART(hour, %s)' % field_name,
458 | # 'minute': 'DATEPART(minute, %s)' % field_name if lookup_type != 'hour' else '0',
459 | # 'second': 'DATEPART(second, %s)' % field_name if lookup_type == 'second' else '0',
460 | # }
461 | # sql = 'TIMEFROMPARTS(%(hour)s, %(minute)s, %(second)s, 0, 0)' % fields
462 | if lookup_type == 'hour':
463 | sql = "CONVERT(time, SUBSTRING(CONVERT(varchar, %s, 114), 0, 3) + ':00:00')" % field_name
464 | elif lookup_type == 'minute':
465 | sql = "CONVERT(time, SUBSTRING(CONVERT(varchar, %s, 114), 0, 6) + ':00')" % field_name
466 | elif lookup_type == 'second':
467 | sql = "CONVERT(time, SUBSTRING(CONVERT(varchar, %s, 114), 0, 9))" % field_name
468 | return sql
469 |
470 | def conditional_expression_supported_in_where_clause(self, expression):
471 | """
472 | Following "Moved conditional expression wrapping to the Exact lookup" in django 3.1
473 | https://github.com/django/django/commit/37e6c5b79bd0529a3c85b8c478e4002fd33a2a1d
474 | """
475 | if django.VERSION >= (3, 1):
476 | if isinstance(expression, (Exists, WhereNode)):
477 | return True
478 | if isinstance(expression, ExpressionWrapper) and expression.conditional:
479 | return self.conditional_expression_supported_in_where_clause(expression.expression)
480 | if isinstance(expression, RawSQL) and expression.conditional:
481 | return True
482 | return False
483 | return True
484 |
--------------------------------------------------------------------------------
/sql_server/pyodbc/compiler.py:
--------------------------------------------------------------------------------
1 | import types
2 | from itertools import chain
3 |
4 | import django
5 | from django.db.models.aggregates import Avg, Count, StdDev, Variance
6 | from django.db.models.expressions import Ref, Subquery, Value
7 | from django.db.models.functions import (
8 | Chr, ConcatPair, Greatest, Least, Length, LPad, Repeat, RPad, StrIndex, Substr, Trim
9 | )
10 | from django.db.models.sql import compiler
11 | from django.db.transaction import TransactionManagementError
12 | from django.db.utils import NotSupportedError
13 |
14 |
15 | def _as_sql_agv(self, compiler, connection):
16 | return self.as_sql(compiler, connection, template='%(function)s(CONVERT(float, %(field)s))')
17 |
18 |
19 | def _as_sql_chr(self, compiler, connection):
20 | return self.as_sql(compiler, connection, function='NCHAR')
21 |
22 |
23 | def _as_sql_concatpair(self, compiler, connection):
24 | if connection.sql_server_version < 2012:
25 | node = self.coalesce()
26 | return node.as_sql(compiler, connection, arg_joiner=' + ', template='%(expressions)s')
27 | else:
28 | return self.as_sql(compiler, connection)
29 |
30 |
31 | def _as_sql_count(self, compiler, connection):
32 | return self.as_sql(compiler, connection, function='COUNT_BIG')
33 |
34 |
35 | def _as_sql_greatest(self, compiler, connection):
36 | # SQL Server does not provide GREATEST function,
37 | # so we emulate it with a table value constructor
38 | # https://msdn.microsoft.com/en-us/library/dd776382.aspx
39 | template = '(SELECT MAX(value) FROM (VALUES (%(expressions)s)) AS _%(function)s(value))'
40 | return self.as_sql(compiler, connection, arg_joiner='), (', template=template)
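    # Resulting SQL shape for three expressions (illustrative):
    #   (SELECT MAX(value) FROM (VALUES (%s), (%s), (%s)) AS _GREATEST(value))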
41 |
42 |
43 | def _as_sql_least(self, compiler, connection):
44 | # SQL Server does not provide LEAST function,
45 | # so we emulate it with a table value constructor
46 | # https://msdn.microsoft.com/en-us/library/dd776382.aspx
47 | template = '(SELECT MIN(value) FROM (VALUES (%(expressions)s)) AS _%(function)s(value))'
48 | return self.as_sql(compiler, connection, arg_joiner='), (', template=template)
49 |
50 |
51 | def _as_sql_length(self, compiler, connection):
52 | return self.as_sql(compiler, connection, function='LEN')
53 |
54 |
55 | def _as_sql_lpad(self, compiler, connection):
56 | i = iter(self.get_source_expressions())
57 | expression, expression_arg = compiler.compile(next(i))
58 | length, length_arg = compiler.compile(next(i))
59 | fill_text, fill_text_arg = compiler.compile(next(i))
60 | params = []
61 | params.extend(fill_text_arg)
62 | params.extend(length_arg)
63 | params.extend(length_arg)
64 | params.extend(expression_arg)
65 | params.extend(length_arg)
66 | params.extend(expression_arg)
67 | params.extend(expression_arg)
68 | template = ('LEFT(REPLICATE(%(fill_text)s, %(length)s), CASE WHEN %(length)s > LEN(%(expression)s) '
69 | 'THEN %(length)s - LEN(%(expression)s) ELSE 0 END) + %(expression)s')
70 | return template % {'expression': expression, 'length': length, 'fill_text': fill_text}, params
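    # Worked example: LPad('abc', 5, fill_text='.') renders SQL evaluating to
    # '..abc' -- LEFT(REPLICATE('.', 5), 5 - LEN('abc')) + 'abc'.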
71 |
72 |
73 | def _as_sql_repeat(self, compiler, connection):
74 | return self.as_sql(compiler, connection, function='REPLICATE')
75 |
76 |
77 | def _as_sql_rpad(self, compiler, connection):
78 | i = iter(self.get_source_expressions())
79 | expression, expression_arg = compiler.compile(next(i))
80 | length, length_arg = compiler.compile(next(i))
81 | fill_text, fill_text_arg = compiler.compile(next(i))
82 | params = []
83 | params.extend(expression_arg)
84 | params.extend(fill_text_arg)
85 | params.extend(length_arg)
86 | params.extend(length_arg)
87 | template = 'LEFT(%(expression)s + REPLICATE(%(fill_text)s, %(length)s), %(length)s)'
88 | return template % {'expression': expression, 'length': length, 'fill_text': fill_text}, params
89 |
90 |
91 | def _as_sql_stddev(self, compiler, connection):
92 | function = 'STDEV'
93 | if self.function == 'STDDEV_POP':
94 | function = '%sP' % function
95 | return self.as_sql(compiler, connection, function=function)
96 |
97 |
98 | def _as_sql_strindex(self, compiler, connection):
99 | self.source_expressions.reverse()
100 | sql = self.as_sql(compiler, connection, function='CHARINDEX')
101 | self.source_expressions.reverse()
102 | return sql
103 |
104 |
105 | def _as_sql_substr(self, compiler, connection):
106 | if len(self.get_source_expressions()) < 3:
107 | self.get_source_expressions().append(Value(2**31 - 1))
108 | return self.as_sql(compiler, connection)
109 |
110 |
111 | def _as_sql_trim(self, compiler, connection):
112 | return self.as_sql(compiler, connection, template='LTRIM(RTRIM(%(expressions)s))')
113 |
114 |
115 | def _as_sql_variance(self, compiler, connection):
116 | function = 'VAR'
117 | if self.function == 'VAR_POP':
118 | function = '%sP' % function
119 | return self.as_sql(compiler, connection, function=function)
120 |
121 |
122 | def _cursor_iter(cursor, sentinel, col_count, itersize):
123 | """
124 | Yields blocks of rows from a cursor and ensures the cursor is closed when
125 | done.
126 | """
127 | if not hasattr(cursor.db, 'supports_mars') or cursor.db.supports_mars:
128 | # same as the original Django implementation
129 | try:
130 | for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel):
131 | yield rows if col_count is None else [r[:col_count] for r in rows]
132 | finally:
133 | cursor.close()
134 | else:
135 | # retrieve all chunks from the cursor and close it before yielding
136 |         # so that another cursor can be opened during the iteration
137 | # (for drivers such as FreeTDS)
138 | chunks = []
139 | try:
140 | for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel):
141 | chunks.append(rows if col_count is None else [r[:col_count] for r in rows])
142 | finally:
143 | cursor.close()
144 | for rows in chunks:
145 | yield rows
146 |
147 |
148 | compiler.cursor_iter = _cursor_iter
149 |
150 |
151 | class SQLCompiler(compiler.SQLCompiler):
152 |
153 | def as_sql(self, with_limits=True, with_col_aliases=False):
154 | """
155 | Create the SQL for this query. Return the SQL string and list of
156 | parameters.
157 |
158 | If 'with_limits' is False, any limit/offset information is not included
159 | in the query.
160 | """
161 | refcounts_before = self.query.alias_refcount.copy()
162 | try:
163 | extra_select, order_by, group_by = self.pre_sql_setup()
164 | for_update_part = None
165 | # Is a LIMIT/OFFSET clause needed?
166 | with_limit_offset = with_limits and (self.query.high_mark is not None or self.query.low_mark)
167 | combinator = self.query.combinator
168 | features = self.connection.features
169 |
170 | # The do_offset flag indicates whether we need to construct
171 | # the SQL needed to use limit/offset w/SQL Server.
172 | high_mark = self.query.high_mark
173 | low_mark = self.query.low_mark
174 | do_limit = with_limits and high_mark is not None
175 | do_offset = with_limits and low_mark != 0
176 | # SQL Server 2012 or newer supports OFFSET/FETCH clause
177 | supports_offset_clause = self.connection.sql_server_version >= 2012
178 | do_offset_emulation = do_offset and not supports_offset_clause
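            # With emulation, the final query takes roughly this shape:
            #   SELECT * FROM (
            #       SELECT ..., ROW_NUMBER() OVER (ORDER BY ...) AS [rn] FROM ...
            #   ) AS X WHERE X.rn BETWEEN <low_mark + 1> AND <high_mark>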
179 |
180 | if combinator:
181 | if not getattr(features, 'supports_select_{}'.format(combinator)):
182 | raise NotSupportedError('{} is not supported on this database backend.'.format(combinator))
183 | result, params = self.get_combinator_sql(combinator, self.query.combinator_all)
184 | else:
185 | distinct_fields, distinct_params = self.get_distinct()
186 | # This must come after 'select', 'ordering', and 'distinct' -- see
187 | # docstring of get_from_clause() for details.
188 | from_, f_params = self.get_from_clause()
189 | where, w_params = self.compile(self.where) if self.where is not None else ("", [])
190 | having, h_params = self.compile(self.having) if self.having is not None else ("", [])
191 | params = []
192 | result = ['SELECT']
193 |
194 | if self.query.distinct:
195 | distinct_result, distinct_params = self.connection.ops.distinct_sql(
196 | distinct_fields,
197 | distinct_params,
198 | )
199 | result += distinct_result
200 | params += distinct_params
201 |
202 |                 # SQL Server requires the TOP keyword for limiting at the beginning of the SELECT
203 | if do_limit and not do_offset:
204 | result.append('TOP %d' % high_mark)
205 |
206 | out_cols = []
207 | col_idx = 1
208 | for _, (s_sql, s_params), alias in self.select + extra_select:
209 | if alias:
210 | s_sql = '%s AS %s' % (s_sql, self.connection.ops.quote_name(alias))
211 | elif with_col_aliases or do_offset_emulation:
212 | s_sql = '%s AS %s' % (s_sql, 'Col%d' % col_idx)
213 | col_idx += 1
214 | params.extend(s_params)
215 | out_cols.append(s_sql)
216 |
217 | # SQL Server requires an order-by clause for offsetting
218 | if do_offset:
219 | meta = self.query.get_meta()
220 | qn = self.quote_name_unless_alias
221 | offsetting_order_by = '%s.%s' % (qn(meta.db_table), qn(meta.pk.db_column or meta.pk.column))
222 | if do_offset_emulation:
223 | if order_by:
224 | ordering = []
225 | for expr, (o_sql, o_params, _) in order_by:
226 | # value_expression in OVER clause cannot refer to
227 | # expressions or aliases in the select list. See:
228 | # http://msdn.microsoft.com/en-us/library/ms189461.aspx
229 | src = next(iter(expr.get_source_expressions()))
230 | if isinstance(src, Ref):
231 | src = next(iter(src.get_source_expressions()))
232 | o_sql, _ = src.as_sql(self, self.connection)
233 | odir = 'DESC' if expr.descending else 'ASC'
234 | o_sql = '%s %s' % (o_sql, odir)
235 | ordering.append(o_sql)
236 | params.extend(o_params)
237 | offsetting_order_by = ', '.join(ordering)
238 | order_by = []
239 | out_cols.append('ROW_NUMBER() OVER (ORDER BY %s) AS [rn]' % offsetting_order_by)
240 | elif not order_by:
241 | order_by.append(((None, ('%s ASC' % offsetting_order_by, [], None))))
242 |
243 | if self.query.select_for_update and self.connection.features.has_select_for_update:
244 | if self.connection.get_autocommit():
245 | raise TransactionManagementError('select_for_update cannot be used outside of a transaction.')
246 |
247 | if with_limit_offset and not self.connection.features.supports_select_for_update_with_limit:
248 | raise NotSupportedError(
249 | 'LIMIT/OFFSET is not supported with '
250 | 'select_for_update on this database backend.'
251 | )
252 | nowait = self.query.select_for_update_nowait
253 | skip_locked = self.query.select_for_update_skip_locked
254 | of = self.query.select_for_update_of
255 | # If it's a NOWAIT/SKIP LOCKED/OF query but the backend
256 | # doesn't support it, raise NotSupportedError to prevent a
257 | # possible deadlock.
258 | if nowait and not self.connection.features.has_select_for_update_nowait:
259 | raise NotSupportedError('NOWAIT is not supported on this database backend.')
260 | elif skip_locked and not self.connection.features.has_select_for_update_skip_locked:
261 | raise NotSupportedError('SKIP LOCKED is not supported on this database backend.')
262 | elif of and not self.connection.features.has_select_for_update_of:
263 | raise NotSupportedError('FOR UPDATE OF is not supported on this database backend.')
264 | for_update_part = self.connection.ops.for_update_sql(
265 | nowait=nowait,
266 | skip_locked=skip_locked,
267 | of=self.get_select_for_update_of_arguments(),
268 | )
269 |
270 | if for_update_part and self.connection.features.for_update_after_from:
271 | from_.insert(1, for_update_part)
272 |
273 | result += [', '.join(out_cols), 'FROM', *from_]
274 | params.extend(f_params)
275 |
276 | if where:
277 | result.append('WHERE %s' % where)
278 | params.extend(w_params)
279 |
280 | grouping = []
281 | for g_sql, g_params in group_by:
282 | grouping.append(g_sql)
283 | params.extend(g_params)
284 | if grouping:
285 | if distinct_fields:
286 | raise NotImplementedError('annotate() + distinct(fields) is not implemented.')
287 | order_by = order_by or self.connection.ops.force_no_ordering()
288 | result.append('GROUP BY %s' % ', '.join(grouping))
289 |
290 | if having:
291 | result.append('HAVING %s' % having)
292 | params.extend(h_params)
293 |
294 | if self.query.explain_query:
295 | result.insert(0, self.connection.ops.explain_query_prefix(
296 | self.query.explain_format,
297 | **self.query.explain_options
298 | ))
299 |
300 | if order_by:
301 | ordering = []
302 | for _, (o_sql, o_params, _) in order_by:
303 | ordering.append(o_sql)
304 | params.extend(o_params)
305 | result.append('ORDER BY %s' % ', '.join(ordering))
306 |
307 | # SQL Server requires the backend-specific emulation (2008 or earlier)
308 | # or an offset clause (2012 or newer) for offsetting
309 | if do_offset:
310 | if do_offset_emulation:
311 | # Construct the final SQL clause, using the initial select SQL
312 | # obtained above.
313 | result = ['SELECT * FROM (%s) AS X WHERE X.rn' % ' '.join(result)]
314 | # Place WHERE condition on `rn` for the desired range.
315 | if do_limit:
316 | result.append('BETWEEN %d AND %d' % (low_mark + 1, high_mark))
317 | else:
318 | result.append('>= %d' % (low_mark + 1))
319 | if not self.query.subquery:
320 | result.append('ORDER BY X.rn')
321 | else:
322 | result.append(self.connection.ops.limit_offset_sql(self.query.low_mark, self.query.high_mark))
323 |
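# Annotation (not in the original source): illustrative shape of the emulated
# query built above, for LIMIT 10 OFFSET 20 (qs[20:30]) on SQL Server 2008 or
# earlier:
#   SELECT * FROM (
#       SELECT ..., ROW_NUMBER() OVER (ORDER BY [tbl].[pk]) AS [rn] FROM ...
#   ) AS X WHERE X.rn BETWEEN 21 AND 30 ORDER BY X.rn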
324 | if self.query.subquery and extra_select:
325 | # If the query is used as a subquery, the extra selects would
326 | # result in more columns than the left-hand side expression is
327 | # expecting. This can happen when a subquery uses a combination
328 | # of order_by() and distinct(), forcing the ordering expressions
329 | # to be selected as well. Wrap the query in another subquery
330 | # to exclude extraneous selects.
331 | sub_selects = []
332 | sub_params = []
333 | for index, (select, _, alias) in enumerate(self.select, start=1):
334 | if not alias and with_col_aliases:
335 | alias = 'col%d' % index
336 | if alias:
337 | sub_selects.append("%s.%s" % (
338 | self.connection.ops.quote_name('subquery'),
339 | self.connection.ops.quote_name(alias),
340 | ))
341 | else:
342 | select_clone = select.relabeled_clone({select.alias: 'subquery'})
343 | subselect, subparams = select_clone.as_sql(self, self.connection)
344 | sub_selects.append(subselect)
345 | sub_params.extend(subparams)
346 | return 'SELECT %s FROM (%s) subquery' % (
347 | ', '.join(sub_selects),
348 | ' '.join(result),
349 | ), tuple(sub_params + params)
350 |
351 | return ' '.join(result), tuple(params)
352 | finally:
353 | # Finally do cleanup - get rid of the joins we created above.
354 | self.query.reset_refcounts(refcounts_before)
355 |
356 | def compile(self, node, *args, **kwargs):
357 | node = self._as_microsoft(node)
358 | return super().compile(node, *args, **kwargs)
359 |
360 | def collapse_group_by(self, expressions, having):
361 | expressions = super().collapse_group_by(expressions, having)
362 | return [e for e in expressions if not isinstance(e, Subquery)]
363 |
364 | def _as_microsoft(self, node):
365 | as_microsoft = None
366 | if isinstance(node, Avg):
367 | as_microsoft = _as_sql_agv
368 | elif isinstance(node, Chr):
369 | as_microsoft = _as_sql_chr
370 | elif isinstance(node, ConcatPair):
371 | as_microsoft = _as_sql_concatpair
372 | elif isinstance(node, Count):
373 | as_microsoft = _as_sql_count
374 | elif isinstance(node, Greatest):
375 | as_microsoft = _as_sql_greatest
376 | elif isinstance(node, Least):
377 | as_microsoft = _as_sql_least
378 | elif isinstance(node, Length):
379 | as_microsoft = _as_sql_length
380 | elif isinstance(node, RPad):
381 | as_microsoft = _as_sql_rpad
382 | elif isinstance(node, LPad):
383 | as_microsoft = _as_sql_lpad
384 | elif isinstance(node, Repeat):
385 | as_microsoft = _as_sql_repeat
386 | elif isinstance(node, StdDev):
387 | as_microsoft = _as_sql_stddev
388 | elif isinstance(node, StrIndex):
389 | as_microsoft = _as_sql_strindex
390 | elif isinstance(node, Substr):
391 | as_microsoft = _as_sql_substr
392 | elif isinstance(node, Trim):
393 | as_microsoft = _as_sql_trim
394 | elif isinstance(node, Variance):
395 | as_microsoft = _as_sql_variance
396 | if as_microsoft:
397 | node = node.copy()
398 | node.as_microsoft = types.MethodType(as_microsoft, node)
399 | return node
400 |
401 |
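# Annotation (not in the original source): Django's base SQLCompiler.compile()
# looks for an 'as_<connection.vendor>' method on each expression node; since
# DatabaseWrapper.vendor is 'microsoft', the as_microsoft methods bound in
# _as_microsoft() above are picked up by super().compile().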
402 | class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler):
403 | def get_returned_fields(self):
404 | if django.VERSION >= (3, 0, 0):
405 | return self.returning_fields
406 | return self.return_id
407 |
408 | def fix_auto(self, sql, opts, fields, qn):
409 | if opts.auto_field is not None:
410 | # db_column is None if not explicitly specified by model field
411 | auto_field_column = opts.auto_field.db_column or opts.auto_field.column
412 | columns = [f.column for f in fields]
413 | if auto_field_column in columns:
414 | id_insert_sql = []
415 | table = qn(opts.db_table)
416 | sql_format = 'SET IDENTITY_INSERT %s ON; %s; SET IDENTITY_INSERT %s OFF'
417 | for q, p in sql:
418 | id_insert_sql.append((sql_format % (table, q, table), p))
419 | sql = id_insert_sql
420 |
421 | return sql
422 |
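# Annotation (not in the original source; table and column names are
# hypothetical): fix_auto() wraps each statement so explicit pk values can be
# inserted into an IDENTITY column, e.g.
#   SET IDENTITY_INSERT [app_foo] ON; INSERT INTO [app_foo] ([id], [name])
#   VALUES (%s, %s); SET IDENTITY_INSERT [app_foo] OFF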
423 | def as_sql(self):
424 | # We don't need quote_name_unless_alias() here, since these are all
425 | # going to be column names (so we can avoid the extra overhead).
426 | qn = self.connection.ops.quote_name
427 | opts = self.query.get_meta()
428 | result = ['INSERT INTO %s' % qn(opts.db_table)]
429 | fields = self.query.fields or [opts.pk]
430 |
431 | if self.query.fields:
432 | result.append('(%s)' % ', '.join(qn(f.column) for f in fields))
433 | values_format = 'VALUES (%s)'
434 | value_rows = [
435 | [self.prepare_value(field, self.pre_save_val(field, obj)) for field in fields]
436 | for obj in self.query.objs
437 | ]
438 | else:
439 | values_format = '%s VALUES'
440 | # An empty object.
441 | value_rows = [[self.connection.ops.pk_default_value()] for _ in self.query.objs]
442 | fields = [None]
443 |
444 | # Currently the backends just accept values when generating bulk
445 | # queries and generate their own placeholders. Doing that isn't
446 | # necessary and it should be possible to use placeholders and
447 | # expressions in bulk inserts too.
448 | can_bulk = (not self.get_returned_fields() and self.connection.features.has_bulk_insert) and self.query.fields
449 |
450 | placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows)
451 |
452 | if self.get_returned_fields() and self.connection.features.can_return_id_from_insert:
453 | result.insert(0, 'SET NOCOUNT ON')
454 | result.append((values_format + ';') % ', '.join(placeholder_rows[0]))
455 | params = [param_rows[0]]
456 | result.append('SELECT CAST(SCOPE_IDENTITY() AS bigint)')
457 | sql = [(" ".join(result), tuple(chain.from_iterable(params)))]
458 | else:
459 | if can_bulk:
460 | result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows))
461 | sql = [(" ".join(result), tuple(p for ps in param_rows for p in ps))]
462 | else:
463 | sql = [
464 | (" ".join(result + [values_format % ", ".join(p)]), vals)
465 | for p, vals in zip(placeholder_rows, param_rows)
466 | ]
467 |
468 | if self.query.fields:
469 | sql = self.fix_auto(sql, opts, fields, qn)
470 |
471 | return sql
472 |
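# Annotation (not in the original source; names are hypothetical): when the
# inserted id must be returned, the generated batch looks roughly like
#   SET NOCOUNT ON INSERT INTO [app_foo] ([name]) VALUES (%s);
#   SELECT CAST(SCOPE_IDENTITY() AS bigint)
# (the %s placeholders are rewritten to pyodbc's '?' later, in CursorWrapper).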
473 |
474 | class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler):
475 | def as_sql(self):
476 | sql, params = super().as_sql()
477 | if sql:
478 | sql = '; '.join(['SET NOCOUNT OFF', sql])
479 | return sql, params
480 |
481 |
482 | class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler):
483 | def as_sql(self):
484 | sql, params = super().as_sql()
485 | if sql:
486 | sql = '; '.join(['SET NOCOUNT OFF', sql])
487 | return sql, params
488 |
489 |
490 | class SQLAggregateCompiler(compiler.SQLAggregateCompiler, SQLCompiler):
491 | pass
492 |
--------------------------------------------------------------------------------
/sql_server/pyodbc/base.py:
--------------------------------------------------------------------------------
1 | """
2 | MS SQL Server database backend for Django.
3 | """
4 | import os
5 | import re
6 | import time
7 |
8 | from django.core.exceptions import ImproperlyConfigured
9 |
10 | try:
11 | import pyodbc as Database
12 | except ImportError as e:
13 | raise ImproperlyConfigured("Error loading pyodbc module: %s" % e)
14 |
15 | from django.utils.version import get_version_tuple # noqa
16 |
17 | pyodbc_ver = get_version_tuple(Database.version)
18 | if pyodbc_ver < (3, 0):
19 | raise ImproperlyConfigured("pyodbc 3.0 or newer is required; you have %s" % Database.version)
20 |
21 | from django.conf import settings # noqa
22 | from django.db import NotSupportedError # noqa
23 | from django.db.backends.base.base import BaseDatabaseWrapper # noqa
24 | from django.utils.encoding import smart_str # noqa
25 | from django.utils.functional import cached_property # noqa
26 |
27 | if hasattr(settings, 'DATABASE_CONNECTION_POOLING'):
28 | if not settings.DATABASE_CONNECTION_POOLING:
29 | Database.pooling = False
30 |
31 | from .client import DatabaseClient # noqa
32 | from .creation import DatabaseCreation # noqa
33 | from .features import DatabaseFeatures # noqa
34 | from .introspection import DatabaseIntrospection # noqa
35 | from .operations import DatabaseOperations # noqa
36 | from .schema import DatabaseSchemaEditor # noqa
37 |
38 | EDITION_AZURE_SQL_DB = 5
39 |
40 |
41 | def encode_connection_string(fields):
42 | """Encode dictionary of keys and values as an ODBC connection string.
43 |
44 | See [MS-ODBCSTR] document:
45 | https://msdn.microsoft.com/en-us/library/ee208909%28v=sql.105%29.aspx
46 | """
47 | # As the keys are all provided by us, we don't need to encode them; we
48 | # know they are OK.
49 | return ';'.join(
50 | '%s=%s' % (k, encode_value(v))
51 | for k, v in fields.items()
52 | )
53 |
54 |
55 | def encode_value(v):
56 | """If the value contains a semicolon, or starts with a left curly brace,
57 | then enclose it in curly braces and escape all right curly braces.
58 | """
59 | if ';' in v or v.strip(' ').startswith('{'):
60 | return '{%s}' % (v.replace('}', '}}'),)
61 | return v
62 |
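# Annotation (not in the original source): illustrative results of the two
# helpers above:
#   encode_value('p;wd')  -> '{p;wd}'    (contains ';', so it gets wrapped)
#   encode_value('{x}')   -> '{{x}}}'    ('}' doubled, then wrapped)
#   encode_connection_string({'SERVER': 'localhost,1433', 'PWD': 'p;wd'})
#     -> 'SERVER=localhost,1433;PWD={p;wd}'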
63 |
64 | class DatabaseWrapper(BaseDatabaseWrapper):
65 | vendor = 'microsoft'
66 | display_name = 'SQL Server'
67 | # This dictionary maps Field objects to their associated MS SQL column
68 | # types, as strings. Column-type strings can contain format strings; they'll
69 | # be interpolated against the values of Field.__dict__ before being output.
70 | # If a column type is set to None, it won't be included in the output.
71 | data_types = {
72 | 'AutoField': 'int IDENTITY (1, 1)',
73 | 'BigAutoField': 'bigint IDENTITY (1, 1)',
74 | 'BigIntegerField': 'bigint',
75 | 'BinaryField': 'varbinary(max)',
76 | 'BooleanField': 'bit',
77 | 'CharField': 'nvarchar(%(max_length)s)',
78 | 'DateField': 'date',
79 | 'DateTimeField': 'datetime2',
80 | 'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
81 | 'DurationField': 'bigint',
82 | 'FileField': 'nvarchar(%(max_length)s)',
83 | 'FilePathField': 'nvarchar(%(max_length)s)',
84 | 'FloatField': 'double precision',
85 | 'IntegerField': 'int',
86 | 'IPAddressField': 'nvarchar(15)',
87 | 'GenericIPAddressField': 'nvarchar(39)',
88 | 'NullBooleanField': 'bit',
89 | 'OneToOneField': 'int',
90 | 'PositiveIntegerField': 'int',
91 | 'PositiveSmallIntegerField': 'smallint',
92 | 'PositiveBigIntegerField': 'bigint',
93 | 'SlugField': 'nvarchar(%(max_length)s)',
94 | 'SmallAutoField': 'smallint IDENTITY (1, 1)',
95 | 'SmallIntegerField': 'smallint',
96 | 'TextField': 'nvarchar(max)',
97 | 'TimeField': 'time',
98 | 'UUIDField': 'char(32)',
99 | 'JSONField': 'nvarchar(max)',
100 | }
101 | data_type_check_constraints = {
102 | 'PositiveIntegerField': '[%(column)s] >= 0',
103 | 'PositiveSmallIntegerField': '[%(column)s] >= 0',
104 | }
105 | operators = {
106 | # Since '=' is used not only for string comparison there is no way
107 | # to make it case (in)sensitive.
108 | 'exact': '= %s',
109 | 'iexact': "= UPPER(%s)",
110 | 'contains': "LIKE %s ESCAPE '\\'",
111 | 'icontains': "LIKE UPPER(%s) ESCAPE '\\'",
112 | 'gt': '> %s',
113 | 'gte': '>= %s',
114 | 'lt': '< %s',
115 | 'lte': '<= %s',
116 | 'startswith': "LIKE %s ESCAPE '\\'",
117 | 'endswith': "LIKE %s ESCAPE '\\'",
118 | 'istartswith': "LIKE UPPER(%s) ESCAPE '\\'",
119 | 'iendswith': "LIKE UPPER(%s) ESCAPE '\\'",
120 | }
121 |
122 | # The patterns below are used to generate SQL pattern lookup clauses when
123 | # the right-hand side of the lookup isn't a raw string (it might be an expression
124 | # or the result of a bilateral transformation).
125 | # In those cases, special characters for LIKE operators (e.g. \, *, _) should be
126 | # escaped on database side.
127 | #
128 | # Note: we use str.format() here for readability as '%' is used as a wildcard for
129 | # the LIKE operator.
130 | pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '[\]'), '%%', '[%%]'), '_', '[_]')"
131 | pattern_ops = {
132 | 'contains': "LIKE '%%' + {} + '%%'",
133 | 'icontains': "LIKE '%%' + UPPER({}) + '%%'",
134 | 'startswith': "LIKE {} + '%%'",
135 | 'istartswith': "LIKE UPPER({}) + '%%'",
136 | 'endswith': "LIKE '%%' + {}",
137 | 'iendswith': "LIKE '%%' + UPPER({})",
138 | }
139 |
140 | Database = Database
141 | SchemaEditorClass = DatabaseSchemaEditor
142 | # Classes instantiated in __init__().
143 | client_class = DatabaseClient
144 | creation_class = DatabaseCreation
145 | features_class = DatabaseFeatures
146 | introspection_class = DatabaseIntrospection
147 | ops_class = DatabaseOperations
148 |
149 | _codes_for_networkerror = (
150 | '08S01',
151 | '08S02',
152 | )
153 | _sql_server_versions = {
154 | 9: 2005,
155 | 10: 2008,
156 | 11: 2012,
157 | 12: 2014,
158 | 13: 2016,
159 | 14: 2017,
160 | 15: 2019,
161 | }
162 |
163 | # https://azure.microsoft.com/en-us/documentation/articles/sql-database-develop-csharp-retry-windows/
164 | _transient_error_numbers = (
165 | '4060',
166 | '10928',
167 | '10929',
168 | '40197',
169 | '40501',
170 | '40613',
171 | '49918',
172 | '49919',
173 | '49920',
174 | )
175 |
176 | def __init__(self, *args, **kwargs):
177 | super().__init__(*args, **kwargs)
178 |
179 | opts = self.settings_dict["OPTIONS"]
180 |
181 | # capability for Multiple Active Result Sets (MARS) or multiple cursors
182 | self.supports_mars = False
183 |
184 | # Some drivers need unicode encoded as UTF-8. If driver_charset is left
185 | # as None here, it is determined at connection time based on the driver
186 | # (it is cleared for the Microsoft drivers, which handle unicode natively).
187 | #
188 | # However, recent versions of FreeTDS and pyodbc (0.91 and 3.0.6 as
189 | # of writing) are perfectly okay being fed unicode, which is why
190 | # this option is configurable.
191 | if 'driver_needs_utf8' in opts:
192 | self.driver_charset = 'utf-8'
193 | else:
194 | self.driver_charset = opts.get('driver_charset', None)
195 |
196 | # interval to wait for recovery from network error
197 | interval = opts.get('connection_recovery_interval_msec', 0.0)
198 | self.connection_recovery_interval_msec = float(interval) / 1000
199 |
200 | # make lookup operators collation-sensitive if needed
201 | collation = opts.get('collation', None)
202 | if collation:
203 | self.operators = dict(self.__class__.operators)
204 | ops = {}
205 | for op in self.operators:
206 | sql = self.operators[op]
207 | if sql.startswith('LIKE '):
208 | ops[op] = '%s COLLATE %s' % (sql, collation)
209 | self.operators.update(ops)
210 |
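# Annotation (not in the original source): with, for example,
# OPTIONS={'collation': 'Latin1_General_CS_AS'}, the 'contains' operator
# becomes "LIKE %s ESCAPE '\\' COLLATE Latin1_General_CS_AS".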
211 | def create_cursor(self, name=None):
212 | return CursorWrapper(self.connection.cursor(), self)
213 |
214 | def _cursor(self):
215 | new_conn = False
216 |
217 | if self.connection is None:
218 | new_conn = True
219 |
220 | conn = super()._cursor()
221 | if new_conn:
222 | if self.sql_server_version <= 2005:
223 | self.data_types['DateField'] = 'datetime'
224 | self.data_types['DateTimeField'] = 'datetime'
225 | self.data_types['TimeField'] = 'datetime'
226 |
227 | return conn
228 |
229 | def get_connection_params(self):
230 | settings_dict = self.settings_dict
231 | if settings_dict['NAME'] == '':
232 | raise ImproperlyConfigured(
233 | "settings.DATABASES is improperly configured. "
234 | "Please supply the NAME value.")
235 | conn_params = settings_dict.copy()
236 | if conn_params['NAME'] is None:
237 | conn_params['NAME'] = 'master'
238 | return conn_params
239 |
240 | def get_new_connection(self, conn_params):
241 | database = conn_params['NAME']
242 | host = conn_params.get('HOST', 'localhost')
243 | user = conn_params.get('USER', None)
244 | password = conn_params.get('PASSWORD', None)
245 | port = conn_params.get('PORT', None)
246 |
247 | options = conn_params.get('OPTIONS', {})
248 | driver = options.get('driver', 'ODBC Driver 13 for SQL Server')
249 | dsn = options.get('dsn', None)
250 |
251 | # Microsoft driver names assumed here are:
252 | # * SQL Server Native Client 10.0/11.0
253 | # * ODBC Driver 11/13 for SQL Server
254 | ms_drivers = re.compile('^ODBC Driver .* for SQL Server$|^SQL Server Native Client')
255 |
256 | # available ODBC connection string keywords:
257 | # (Microsoft drivers for Windows)
258 | # https://docs.microsoft.com/en-us/sql/relational-databases/native-client/applications/using-connection-string-keywords-with-sql-server-native-client
259 | # (Microsoft drivers for Linux/Mac)
260 | # https://docs.microsoft.com/en-us/sql/connect/odbc/linux-mac/connection-string-keywords-and-data-source-names-dsns
261 | # (FreeTDS)
262 | # http://www.freetds.org/userguide/odbcconnattr.htm
263 | cstr_parts = {}
264 | if dsn:
265 | cstr_parts['DSN'] = dsn
266 | else:
267 | # Only set DRIVER when the 'dsn' option hasn't been provided
268 | cstr_parts['DRIVER'] = driver
269 |
270 | if ms_drivers.match(driver):
271 | if port:
272 | host = ','.join((host, str(port)))
273 | cstr_parts['SERVER'] = host
274 | elif options.get('host_is_server', False):
275 | if port:
276 | cstr_parts['PORT'] = str(port)
277 | cstr_parts['SERVER'] = host
278 | else:
279 | cstr_parts['SERVERNAME'] = host
280 |
281 | if user:
282 | cstr_parts['UID'] = user
283 | cstr_parts['PWD'] = password
284 | else:
285 | if ms_drivers.match(driver):
286 | cstr_parts['Trusted_Connection'] = 'yes'
287 | else:
288 | cstr_parts['Integrated Security'] = 'SSPI'
289 |
290 | cstr_parts['DATABASE'] = database
291 |
292 | if ms_drivers.match(driver) and os.name == 'nt':
293 | cstr_parts['MARS_Connection'] = 'yes'
294 |
295 | connstr = encode_connection_string(cstr_parts)
296 |
297 | # extra_params are glued on the end of the string without encoding,
298 | # so it's up to the settings writer to make sure they're appropriate -
299 | # use encode_connection_string if constructing from external input.
300 | if options.get('extra_params', None):
301 | connstr += ';' + options['extra_params']
302 |
303 | unicode_results = options.get('unicode_results', False)
304 | timeout = options.get('connection_timeout', 0)
305 | retries = options.get('connection_retries', 5)
306 | backoff_time = options.get('connection_retry_backoff_time', 5)
307 | query_timeout = options.get('query_timeout', 0)
308 |
309 | conn = None
310 | retry_count = 0
311 | need_to_retry = False
312 | while conn is None:
313 | try:
314 | conn = Database.connect(connstr,
315 | unicode_results=unicode_results,
316 | timeout=timeout)
317 | except Exception as e:
318 | for error_number in self._transient_error_numbers:
319 | if error_number in e.args[1]:
320 | if retry_count < retries:
321 | time.sleep(backoff_time)
322 | need_to_retry = True
323 | retry_count = retry_count + 1
324 | else:
325 | need_to_retry = False
326 | break
327 | if not need_to_retry:
328 | raise
329 |
330 | conn.timeout = query_timeout
331 | return conn
332 |
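# Annotation (not in the original source): the retry behaviour above is tuned
# via OPTIONS; the values shown here are the defaults read by
# get_new_connection():
#   'connection_retries': 5, 'connection_retry_backoff_time': 5,
#   'connection_timeout': 0, 'query_timeout': 0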
333 | def init_connection_state(self):
334 | drv_name = self.connection.getinfo(Database.SQL_DRIVER_NAME).upper()
335 |
336 | if drv_name.startswith('LIBTDSODBC'):
337 | try:
338 | drv_ver = self.connection.getinfo(Database.SQL_DRIVER_VER)
339 | ver = get_version_tuple(drv_ver)[:2]
340 | if ver < (0, 95):
341 | raise ImproperlyConfigured(
342 | "FreeTDS 0.95 or newer is required.")
343 | except Exception:
344 | # unknown driver version
345 | pass
346 |
347 | ms_drv_names = re.compile('^(LIB)?(SQLNCLI|MSODBCSQL)')
348 |
349 | if ms_drv_names.match(drv_name):
350 | self.driver_charset = None
351 | # http://msdn.microsoft.com/en-us/library/ms131686.aspx
352 | self.supports_mars = True
353 | self.features.can_use_chunked_reads = True
354 |
355 | settings_dict = self.settings_dict
356 | cursor = self.create_cursor()
357 |
358 | options = settings_dict.get('OPTIONS', {})
359 | isolation_level = options.get('isolation_level', None)
360 | if isolation_level:
361 | cursor.execute('SET TRANSACTION ISOLATION LEVEL %s' % isolation_level)
362 |
363 | # Set date format for the connection. Also, make sure Sunday is
364 | # considered the first day of the week (to be consistent with the
365 | # Django convention for the 'week_day' lookup) if the user
366 | # hasn't told us otherwise.
367 | datefirst = options.get('datefirst', 7)
368 | cursor.execute('SET DATEFORMAT ymd; SET DATEFIRST %s' % datefirst)
369 |
370 | val = self.get_system_datetime()
371 | if isinstance(val, str):
372 | raise ImproperlyConfigured(
373 | "The database driver doesn't support modern datetime types.")
374 |
375 | def is_usable(self):
376 | try:
377 | self.create_cursor().execute("SELECT 1")
378 | except Database.Error:
379 | return False
380 | else:
381 | return True
382 |
383 | def get_system_datetime(self):
384 | # http://blogs.msdn.com/b/sqlnativeclient/archive/2008/02/27/microsoft-sql-server-native-client-and-microsoft-sql-server-2008-native-client.aspx
385 | with self.temporary_connection() as cursor:
386 | if self.sql_server_version <= 2005:
387 | return cursor.execute('SELECT GETDATE()').fetchone()[0]
388 | else:
389 | return cursor.execute('SELECT SYSDATETIME()').fetchone()[0]
390 |
391 | @cached_property
392 | def sql_server_version(self, _known_versions={}):
393 | """
394 | Get the SQL server version
395 |
396 | The _known_versions default dictionary is created on the class. This is
397 | intentional - it allows us to cache this property's value across instances.
398 | Therefore, when Django creates a new database connection using the same
399 | alias, we won't need to query the server again.
400 | """
401 | if self.alias not in _known_versions:
402 | with self.temporary_connection() as cursor:
403 | cursor.execute("SELECT CAST(SERVERPROPERTY('ProductVersion') AS varchar)")
404 | ver = cursor.fetchone()[0]
405 | ver = int(ver.split('.')[0])
406 | if ver not in self._sql_server_versions:
407 | raise NotSupportedError('SQL Server v%d is not supported.' % ver)
408 | _known_versions[self.alias] = self._sql_server_versions[ver]
409 | return _known_versions[self.alias]
410 |
411 | @cached_property
412 | def to_azure_sql_db(self, _known_azures={}):
413 | """
414 | Whether this connection is to a Microsoft Azure database server
415 |
416 | The _known_azures default dictionary is created on the class. This is
417 | intentional - it allows us to cache this property's value across instances.
418 | Therefore, when Django creates a new database connection using the same
419 | alias, we won't need to query the server again.
420 | """
421 | if self.alias not in _known_azures:
422 | with self.temporary_connection() as cursor:
423 | cursor.execute("SELECT CAST(SERVERPROPERTY('EngineEdition') AS integer)")
424 | _known_azures[self.alias] = cursor.fetchone()[0] == EDITION_AZURE_SQL_DB
425 | return _known_azures[self.alias]
426 |
427 | def _execute_foreach(self, sql, table_names=None):
428 | cursor = self.cursor()
429 | if table_names is None:
430 | table_names = self.introspection.table_names(cursor)
431 | for table_name in table_names:
432 | cursor.execute(sql % self.ops.quote_name(table_name))
433 |
434 | def _get_trancount(self):
435 | with self.connection.cursor() as cursor:
436 | return cursor.execute('SELECT @@TRANCOUNT').fetchone()[0]
437 |
438 | def _on_error(self, e):
439 | if e.args[0] in self._codes_for_networkerror:
440 | try:
441 | # close the stale connection
442 | self.close()
443 | # wait a moment for recovery from network error
444 | time.sleep(self.connection_recovery_interval_msec)
445 | except Exception:
446 | pass
447 | self.connection = None
448 |
449 | def _savepoint(self, sid):
450 | with self.cursor() as cursor:
451 | cursor.execute('SELECT @@TRANCOUNT')
452 | trancount = cursor.fetchone()[0]
453 | if trancount == 0:
454 | cursor.execute(self.ops.start_transaction_sql())
455 | cursor.execute(self.ops.savepoint_create_sql(sid))
456 |
457 | def _savepoint_commit(self, sid):
458 | # SQL Server has no support for partial commit in a transaction
459 | pass
460 |
461 | def _savepoint_rollback(self, sid):
462 | with self.cursor() as cursor:
463 | # FreeTDS requires TRANCOUNT that is greater than 0
464 | cursor.execute('SELECT @@TRANCOUNT')
465 | trancount = cursor.fetchone()[0]
466 | if trancount > 0:
467 | cursor.execute(self.ops.savepoint_rollback_sql(sid))
468 |
469 | def _set_autocommit(self, autocommit):
470 | with self.wrap_database_errors:
471 | allowed = not autocommit
472 | if not allowed:
473 | # FreeTDS requires TRANCOUNT that is greater than 0
474 | allowed = self._get_trancount() > 0
475 | if allowed:
476 | self.connection.autocommit = autocommit
477 |
478 | def check_constraints(self, table_names=None):
479 | self._execute_foreach('ALTER TABLE %s WITH CHECK CHECK CONSTRAINT ALL',
480 | table_names)
481 |
482 | def disable_constraint_checking(self):
483 | if not self.needs_rollback:
484 | self.cursor().execute('EXEC sp_msforeachtable "ALTER TABLE ? NOCHECK CONSTRAINT ALL"')
485 | return not self.needs_rollback
486 |
487 | def enable_constraint_checking(self):
488 | if not self.needs_rollback:
489 | self.cursor().execute('EXEC sp_msforeachtable "ALTER TABLE ? WITH CHECK CHECK CONSTRAINT ALL"')
490 |
491 |
492 | class CursorWrapper(object):
493 | """
494 | A wrapper around pyodbc's cursor that takes into account a) some pyodbc
495 | DB-API 2.0 implementation details and b) some common ODBC driver quirks.
496 | """
497 |
498 | def __init__(self, cursor, connection):
499 | self.active = True
500 | self.cursor = cursor
501 | self.connection = connection
502 | self.driver_charset = connection.driver_charset
503 | self.last_sql = ''
504 | self.last_params = ()
505 |
506 | def close(self):
507 | if self.active:
508 | self.active = False
509 | self.cursor.close()
510 |
511 | def format_sql(self, sql, params):
512 | if self.driver_charset and isinstance(sql, str):
513 | # FreeTDS (and other ODBC drivers?) doesn't support Unicode
514 | # yet, so we need to encode the SQL clause itself in utf-8
515 | sql = smart_str(sql, self.driver_charset)
516 |
517 | # pyodbc uses '?' instead of '%s' as parameter placeholder.
518 | if params is not None:
519 | sql = sql % tuple('?' * len(params))
520 |
521 | return sql
522 |
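# Annotation (not in the original source): e.g.
#   format_sql('SELECT a FROM t WHERE a = %s AND b = %s', (1, 2))
# returns 'SELECT a FROM t WHERE a = ? AND b = ?'.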
523 | def format_params(self, params):
524 | fp = []
525 | if params is not None:
526 | for p in params:
527 | if isinstance(p, str):
528 | if self.driver_charset:
529 | # FreeTDS (and other ODBC drivers?) doesn't support Unicode
530 | # yet, so we need to encode parameters in utf-8
531 | fp.append(smart_str(p, self.driver_charset))
532 | else:
533 | fp.append(p)
534 |
535 | elif isinstance(p, bytes):
536 | fp.append(p)
537 |
538 | elif isinstance(p, bool):
539 | if p:
540 | fp.append(1)
541 | else:
542 | fp.append(0)
543 |
544 | else:
545 | fp.append(p)
546 |
547 | return tuple(fp)
548 |
549 | def execute(self, sql, params=None):
550 | self.last_sql = sql
551 | sql = self.format_sql(sql, params)
552 | params = self.format_params(params)
553 | self.last_params = params
554 | try:
555 | return self.cursor.execute(sql, params)
556 | except Database.Error as e:
557 | self.connection._on_error(e)
558 | raise
559 |
560 | def executemany(self, sql, params_list=()):
561 | if not params_list:
562 | return None
563 | raw_pll = list(params_list)
564 | sql = self.format_sql(sql, raw_pll[0])
565 | params_list = [self.format_params(p) for p in raw_pll]
566 | try:
567 | return self.cursor.executemany(sql, params_list)
568 | except Database.Error as e:
569 | self.connection._on_error(e)
570 | raise
571 |
572 | def format_rows(self, rows):
573 | return list(map(self.format_row, rows))
574 |
575 | def format_row(self, row):
576 | """
577 | Decode data coming from the database if needed and convert rows to tuples
578 | (pyodbc Rows are not hashable).
579 | """
580 | if self.driver_charset:
581 | for i in range(len(row)):
582 | f = row[i]
583 | # FreeTDS (and other ODBC drivers?) doesn't support Unicode
584 | # yet, so we need to decode utf-8 data coming from the DB
585 | if isinstance(f, bytes):
586 | row[i] = f.decode(self.driver_charset)
587 | return tuple(row)
588 |
589 | def fetchone(self):
590 | row = self.cursor.fetchone()
591 | if row is not None:
592 | row = self.format_row(row)
593 | # Any remaining rows in the current set must be discarded
594 | # before changing autocommit mode when you use FreeTDS
595 | if not self.connection.supports_mars:
596 | self.cursor.nextset()
597 | return row
598 |
599 | def fetchmany(self, chunk):
600 | return self.format_rows(self.cursor.fetchmany(chunk))
601 |
602 | def fetchall(self):
603 | return self.format_rows(self.cursor.fetchall())
604 |
605 | def __getattr__(self, attr):
606 | if attr in self.__dict__:
607 | return self.__dict__[attr]
608 | return getattr(self.cursor, attr)
609 |
610 | def __iter__(self):
611 | return iter(self.cursor)
612 |
--------------------------------------------------------------------------------
/sql_server/pyodbc/schema.py:
--------------------------------------------------------------------------------
1 | import binascii
2 | import datetime
3 | import django
4 |
5 | from django.db.backends.base.schema import (
6 | BaseDatabaseSchemaEditor,
7 | _is_relevant_relation,
8 | _related_non_m2m_objects,
9 | logger,
10 | )
11 | from django.db.backends.ddl_references import (
12 | Columns,
13 | IndexName,
14 | Statement as DjStatement,
15 | Table,
16 | )
17 | from django.db.models import Index
18 | from django.db.models.fields import AutoField, BigAutoField
19 | from django.db.transaction import TransactionManagementError
20 | from django.utils.encoding import force_str
21 |
22 |
23 | class Statement(DjStatement):
24 | def __hash__(self):
25 | return hash((self.template, str(self.parts['name'])))
26 |
27 | def __eq__(self, other):
28 | return self.template == other.template and str(self.parts['name']) == str(other.parts['name'])
29 |
30 |
31 | class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
32 |
33 | _sql_check_constraint = " CONSTRAINT %(name)s CHECK (%(check)s)"
34 | _sql_select_default_constraint_name = "SELECT" \
35 | " d.name " \
36 | "FROM sys.default_constraints d " \
37 | "INNER JOIN sys.tables t ON" \
38 | " d.parent_object_id = t.object_id " \
39 | "INNER JOIN sys.columns c ON" \
40 | " d.parent_object_id = c.object_id AND" \
41 | " d.parent_column_id = c.column_id " \
42 | "INNER JOIN sys.schemas s ON" \
43 | " t.schema_id = s.schema_id " \
44 | "WHERE" \
45 | " t.name = %(table)s AND" \
46 | " c.name = %(column)s"
47 | _sql_select_foreign_key_constraints = "SELECT" \
48 | " po.name AS table_name," \
49 | " co.name AS constraint_name " \
50 | "FROM sys.foreign_key_columns fkc " \
51 | "INNER JOIN sys.objects co ON" \
52 | " fkc.constraint_object_id = co.object_id " \
53 | "INNER JOIN sys.tables po ON" \
54 | " fkc.parent_object_id = po.object_id " \
55 | "INNER JOIN sys.tables ro ON" \
56 | " fkc.referenced_object_id = ro.object_id " \
57 | "WHERE ro.name = %(table)s"
58 | sql_alter_column_default = "ADD DEFAULT %(default)s FOR %(column)s"
59 | sql_alter_column_no_default = "DROP CONSTRAINT %(column)s"
60 | sql_alter_column_not_null = "ALTER COLUMN %(column)s %(type)s NOT NULL"
61 | sql_alter_column_null = "ALTER COLUMN %(column)s %(type)s NULL"
62 | sql_alter_column_type = "ALTER COLUMN %(column)s %(type)s"
63 | sql_create_column = "ALTER TABLE %(table)s ADD %(column)s %(definition)s"
64 | sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s"
65 | sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
66 | sql_delete_table = "DROP TABLE %(table)s"
67 | sql_rename_column = "EXEC sp_rename '%(table)s.%(old_column)s', %(new_column)s, 'COLUMN'"
68 | sql_rename_table = "EXEC sp_rename %(old_table)s, %(new_table)s"
69 | sql_create_unique_null = "CREATE UNIQUE INDEX %(name)s ON %(table)s(%(columns)s) " \
70 | "WHERE %(columns)s IS NOT NULL"
71 |
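# Annotation (not in the original source; names are hypothetical): for a
# nullable unique column, sql_create_unique_null renders to a filtered index
# such as
#   CREATE UNIQUE INDEX [app_user_email_uniq] ON [app_user]([email])
#   WHERE [email] IS NOT NULL
# which gives ANSI NULL semantics (multiple NULLs allowed).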
72 | def _alter_column_default_sql(self, model, old_field, new_field, drop=False):
73 | """
74 | Hook to specialize column default alteration.
75 |
76 | Return a (sql, params) fragment to add or drop (depending on the drop
77 | argument) a default to new_field's column.
78 | """
79 | new_default = self.effective_default(new_field)
80 | default = '%s'
81 | params = [new_default]
82 | column = self.quote_name(new_field.column)
83 |
84 | if drop:
85 | params = []
86 | # SQL Server requires the name of the default constraint
87 | result = self.execute(
88 | self._sql_select_default_constraint_name % {
89 | "table": self.quote_value(model._meta.db_table),
90 | "column": self.quote_value(new_field.column),
91 | },
92 | has_result=True
93 | )
94 | if result:
95 | for row in result:
96 | column = self.quote_name(next(iter(row)))
97 | elif self.connection.features.requires_literal_defaults:
98 | # Some databases (Oracle) can't take defaults as a parameter
99 | # If this is the case, the SchemaEditor for that database should
100 | # implement prepare_default().
101 | default = self.prepare_default(new_default)
102 | params = []
103 |
104 | new_db_params = new_field.db_parameters(connection=self.connection)
105 | sql = self.sql_alter_column_no_default if drop else self.sql_alter_column_default
106 | return (
107 | sql % {
108 | 'column': column,
109 | 'type': new_db_params['type'],
110 | 'default': default,
111 | },
112 | params,
113 | )
114 |
115 | def _alter_column_null_sql(self, model, old_field, new_field):
116 | """
117 | Hook to specialize column null alteration.
118 |
119 | Return a (sql, params) fragment to set a column to null or non-null
120 | as required by new_field, or None if no changes are required.
121 | """
122 | if (self.connection.features.interprets_empty_strings_as_nulls and
123 | new_field.get_internal_type() in ("CharField", "TextField")):
124 | # The field is nullable in the database anyway, leave it alone.
125 | return
126 | else:
127 | new_db_params = new_field.db_parameters(connection=self.connection)
128 | sql = self.sql_alter_column_null if new_field.null else self.sql_alter_column_not_null
129 | return (
130 | sql % {
131 | 'column': self.quote_name(new_field.column),
132 | 'type': new_db_params['type'],
133 | },
134 | [],
135 | )
136 |
137 | def _alter_column_type_sql(self, model, old_field, new_field, new_type):
138 | new_type = self._set_field_new_type_null_status(old_field, new_type)
139 | return super()._alter_column_type_sql(model, old_field, new_field, new_type)
140 |
141 | def alter_unique_together(self, model, old_unique_together, new_unique_together):
142 | """
143 | Deal with a model changing its unique_together. The input
144 | unique_togethers must be doubly-nested, not the single-nested
145 | ["foo", "bar"] format.
146 | """
147 | olds = {tuple(fields) for fields in old_unique_together}
148 | news = {tuple(fields) for fields in new_unique_together}
149 | # Deleted uniques
150 | for fields in olds.difference(news):
151 | self._delete_composed_index(model, fields, {'unique': True}, self.sql_delete_index)
152 | # Created uniques
153 | for fields in news.difference(olds):
154 | columns = [model._meta.get_field(field).column for field in fields]
155 | condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns])
156 | sql = self._create_unique_sql(model, columns, condition=condition)
157 | self.execute(sql)
158 |
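# Annotation (not in the original source; names are hypothetical): a
# unique_together of ('a', 'b') is created as a filtered unique index with
# condition "[a] IS NOT NULL AND [b] IS NOT NULL".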
159 | def _model_indexes_sql(self, model):
160 | """
161 | Return a list of all index SQL statements (field indexes,
162 | index_together, Meta.indexes) for the specified model.
163 | """
164 | if not model._meta.managed or model._meta.proxy or model._meta.swapped:
165 | return []
166 | output = []
167 | for field in model._meta.local_fields:
168 | output.extend(self._field_indexes_sql(model, field))
169 |
170 | for field_names in model._meta.index_together:
171 | fields = [model._meta.get_field(field) for field in field_names]
172 | output.append(self._create_index_sql(model, fields, suffix="_idx"))
173 |
174 | for field_names in model._meta.unique_together:
175 | columns = [model._meta.get_field(field).column for field in field_names]
176 | condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns])
177 | sql = self._create_unique_sql(model, columns, condition=condition)
178 | output.append(sql)
179 |
180 | for index in model._meta.indexes:
181 | output.append(index.create_sql(model, self))
182 | return output
183 |
184 | def _alter_many_to_many(self, model, old_field, new_field, strict):
185 | """Alter M2Ms to repoint their to= endpoints."""
186 |
187 | for idx in self._constraint_names(old_field.remote_field.through, index=True, unique=True):
188 | self.execute(self.sql_delete_index % {'name': idx, 'table': old_field.remote_field.through._meta.db_table})
189 |
190 | return super()._alter_many_to_many(model, old_field, new_field, strict)
191 |
192 | def _db_table_constraint_names(self, db_table, column_names=None, unique=None,
193 | primary_key=None, index=None, foreign_key=None,
194 | check=None, type_=None, exclude=None):
195 | """Return all constraint names matching the columns and conditions."""
196 | if column_names is not None:
197 | column_names = [
198 | self.connection.introspection.identifier_converter(name)
199 | for name in column_names
200 | ]
201 | with self.connection.cursor() as cursor:
202 | constraints = self.connection.introspection.get_constraints(cursor, db_table)
203 | result = []
204 | for name, infodict in constraints.items():
205 | if column_names is None or column_names == infodict['columns']:
206 | if unique is not None and infodict['unique'] != unique:
207 | continue
208 | if primary_key is not None and infodict['primary_key'] != primary_key:
209 | continue
210 | if index is not None and infodict['index'] != index:
211 | continue
212 | if check is not None and infodict['check'] != check:
213 | continue
214 | if foreign_key is not None and not infodict['foreign_key']:
215 | continue
216 | if type_ is not None and infodict['type'] != type_:
217 | continue
218 | if not exclude or name not in exclude:
219 | result.append(name)
220 | return result
221 |
222 | def _db_table_delete_constraint_sql(self, template, db_table, name):
223 | return Statement(
224 | template,
225 | table=Table(db_table, self.quote_name),
226 | name=self.quote_name(name),
227 | )
228 |
229 | def alter_db_table(self, model, old_db_table, new_db_table):
230 | index_names = self._db_table_constraint_names(old_db_table, index=True)
231 | for index_name in index_names:
232 | self.execute(self._db_table_delete_constraint_sql(self.sql_delete_index, old_db_table, index_name))
233 |
234 | index_names = self._db_table_constraint_names(new_db_table, index=True)
235 | for index_name in index_names:
236 | self.execute(self._db_table_delete_constraint_sql(self.sql_delete_index, new_db_table, index_name))
237 |
238 | return super().alter_db_table(model, old_db_table, new_db_table)
239 |
240 | def _alter_field(self, model, old_field, new_field, old_type, new_type,
241 | old_db_params, new_db_params, strict=False):
242 | """Actually perform a "physical" (non-ManyToMany) field update."""
243 |
244 | # the backend doesn't support altering from/to (Big)AutoField
245 | # because of SQL Server's limited ability to alter the IDENTITY property
246 | for t in (AutoField, BigAutoField):
247 | if isinstance(old_field, t) or isinstance(new_field, t):
248 | raise NotImplementedError("the backend doesn't support altering from/to %s." % t.__name__)
249 | # Drop any FK constraints, we'll remake them later
250 | fks_dropped = set()
251 | if old_field.remote_field and old_field.db_constraint:
252 | # Drop index, SQL Server requires explicit deletion
253 | if not hasattr(new_field, 'db_constraint') or not new_field.db_constraint:
254 | index_names = self._constraint_names(model, [old_field.column], index=True)
255 | for index_name in index_names:
256 | self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
257 |
258 | fk_names = self._constraint_names(model, [old_field.column], foreign_key=True)
259 | if strict and len(fk_names) != 1:
260 | raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % (
261 | len(fk_names),
262 | model._meta.db_table,
263 | old_field.column,
264 | ))
265 | for fk_name in fk_names:
266 | fks_dropped.add((old_field.column,))
267 | self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, fk_name))
268 | # Has unique been removed?
269 | if old_field.unique and (not new_field.unique or self._field_became_primary_key(old_field, new_field)):
270 | # Find the unique constraint for this field
271 | constraint_names = self._constraint_names(model, [old_field.column], unique=True, primary_key=False)
272 | if strict and len(constraint_names) != 1:
273 | raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
274 | len(constraint_names),
275 | model._meta.db_table,
276 | old_field.column,
277 | ))
278 | for constraint_name in constraint_names:
279 | self.execute(self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name))
280 | # Drop incoming FK constraints if the field is a primary key or unique,
281 | # which might be a to_field target, and things are going to change.
282 | drop_foreign_keys = (
283 | (
284 | (old_field.primary_key and new_field.primary_key) or
285 | (old_field.unique and new_field.unique)
286 | ) and old_type != new_type
287 | )
288 | if drop_foreign_keys:
289 | # '_meta.related_field' also contains M2M reverse fields, these
290 | # will be filtered out
291 | for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field):
292 | rel_fk_names = self._constraint_names(
293 | new_rel.related_model, [new_rel.field.column], foreign_key=True
294 | )
295 | for fk_name in rel_fk_names:
296 | self.execute(self._delete_constraint_sql(self.sql_delete_fk, new_rel.related_model, fk_name))
297 | # Removed an index? (no strict check, as multiple indexes are possible)
298 | # Remove indexes if db_index switched to False or a unique constraint
299 | # will now be used in lieu of an index. The following lines from the
300 | # truth table show all True cases; the rest are False:
301 | #
302 | # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
303 | # ------------------------------------------------------------------------------
304 | # True | False | False | False
305 | # True | False | False | True
306 | # True | False | True | True
307 | if (old_field.db_index and not old_field.unique and (not new_field.db_index or new_field.unique)) or (
308 | # Drop indexes on nvarchar columns that are changing to a different type
309 | # SQL Server requires explicit deletion
310 | (old_field.db_index or old_field.unique) and (
311 | (old_type.startswith('nvarchar') and not new_type.startswith('nvarchar'))
312 | )):
313 | # Find the index for this field
314 | meta_index_names = {index.name for index in model._meta.indexes}
315 | # Retrieve only BTREE indexes since this is what's created with
316 | # db_index=True.
317 | index_names = self._constraint_names(model, [old_field.column], index=True, type_=Index.suffix)
318 | for index_name in index_names:
319 | if index_name not in meta_index_names:
320 | # The only way to check if an index was created with
321 | # db_index=True or with Index(['field'], name='foo')
322 | # is to look at its name (refs #28053).
323 | self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
324 | # Change check constraints?
325 | if (old_db_params['check'] != new_db_params['check'] and old_db_params['check']) or (
326 | # SQL Server requires explicit deletion before altering column type with the same constraint
327 | old_db_params['check'] == new_db_params['check'] and old_db_params['check'] and
328 | old_db_params['type'] != new_db_params['type']
329 | ):
330 | constraint_names = self._constraint_names(model, [old_field.column], check=True)
331 | if strict and len(constraint_names) != 1:
332 | raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % (
333 | len(constraint_names),
334 | model._meta.db_table,
335 | old_field.column,
336 | ))
337 | for constraint_name in constraint_names:
338 | self.execute(self._delete_constraint_sql(self.sql_delete_check, model, constraint_name))
339 | # Have they renamed the column?
340 | if old_field.column != new_field.column:
341 | # remove old indices
342 | self._delete_indexes(model, old_field, new_field)
343 |
344 | self.execute(self._rename_field_sql(model._meta.db_table, old_field, new_field, new_type))
345 | # Rename all references to the renamed column.
346 | for sql in self.deferred_sql:
347 | if isinstance(sql, DjStatement):
348 | sql.rename_column_references(model._meta.db_table, old_field.column, new_field.column)
349 |
350 | # Next, start accumulating actions to do
351 | actions = []
352 | null_actions = []
353 | post_actions = []
354 | # Type change?
355 | if old_type != new_type:
356 | fragment, other_actions = self._alter_column_type_sql(model, old_field, new_field, new_type)
357 | actions.append(fragment)
358 | post_actions.extend(other_actions)
359 | # Drop unique constraint, SQL Server requires explicit deletion
360 | self._delete_unique_constraints(model, old_field, new_field, strict)
361 | # Drop indexes, SQL Server requires explicit deletion
362 | self._delete_indexes(model, old_field, new_field)
363 | # When changing a column NULL constraint to NOT NULL with a given
364 | # default value, we need to perform 4 steps:
365 | # 1. Add a default for new incoming writes
366 | # 2. Update existing NULL rows with new default
367 | # 3. Replace NULL constraint with NOT NULL
368 | # 4. Drop the default again.
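# Annotation (not in the original source; names are hypothetical): for an
# int column going NULL -> NOT NULL with a default, those steps amount to
#   ALTER TABLE [t] ADD DEFAULT %s FOR [col];
#   UPDATE [t] SET [col] = %s WHERE [col] IS NULL;
#   ALTER TABLE [t] ALTER COLUMN [col] int NOT NULL;
#   ALTER TABLE [t] DROP CONSTRAINT [<default constraint name>]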
369 | # Default change?
370 | old_default = self.effective_default(old_field)
371 | new_default = self.effective_default(new_field)
372 | needs_database_default = (
373 | old_field.null and
374 | not new_field.null and
375 | old_default != new_default and
376 | new_default is not None and
377 | not self.skip_default(new_field)
378 | )
379 | if needs_database_default:
380 | actions.append(self._alter_column_default_sql(model, old_field, new_field))
381 | # Nullability change?
382 | if old_field.null != new_field.null:
383 | fragment = self._alter_column_null_sql(model, old_field, new_field)
384 | if fragment:
385 | null_actions.append(fragment)
386 | if not new_field.null:
387 | # Drop unique constraint, SQL Server requires explicit deletion
388 | self._delete_unique_constraints(model, old_field, new_field, strict)
389 | # Drop indexes, SQL Server requires explicit deletion
390 | self._delete_indexes(model, old_field, new_field)
391 | # Only if we have a default and there is a change from NULL to NOT NULL
392 | four_way_default_alteration = (
393 | new_field.has_default() and
394 | (old_field.null and not new_field.null)
395 | )
396 | if actions or null_actions:
397 | if not four_way_default_alteration:
398 | # If we don't have to do a 4-way default alteration we can
399 | # directly run a (NOT) NULL alteration
400 | actions = actions + null_actions
401 | # Combine actions together if we can (e.g. postgres)
402 | if self.connection.features.supports_combined_alters and actions:
403 | sql, params = tuple(zip(*actions))
404 | actions = [(", ".join(sql), sum(params, []))]
405 | # Apply those actions
406 | for sql, params in actions:
407 | self._delete_indexes(model, old_field, new_field)
408 | self.execute(
409 | self.sql_alter_column % {
410 | "table": self.quote_name(model._meta.db_table),
411 | "changes": sql,
412 | },
413 | params,
414 | )
415 | if four_way_default_alteration:
416 | # Update existing rows with default value
417 | self.execute(
418 | self.sql_update_with_default % {
419 | "table": self.quote_name(model._meta.db_table),
420 | "column": self.quote_name(new_field.column),
421 | "default": "%s",
422 | },
423 | [new_default],
424 | )
425 | # Since we didn't run a NOT NULL change before we need to do it
426 | # now
427 | for sql, params in null_actions:
428 | self.execute(
429 | self.sql_alter_column % {
430 | "table": self.quote_name(model._meta.db_table),
431 | "changes": sql,
432 | },
433 | params,
434 | )
435 | if post_actions:
436 | for sql, params in post_actions:
437 | self.execute(sql, params)
438 | # If primary_key changed to False, delete the primary key constraint.
439 | if old_field.primary_key and not new_field.primary_key:
440 | self._delete_primary_key(model, strict)
441 | # Added a unique?
442 | if self._unique_should_be_added(old_field, new_field):
443 | if (self.connection.features.supports_nullable_unique_constraints and
444 | not new_field.many_to_many and new_field.null):
445 |
446 | self.execute(
447 | self._create_index_sql(
448 | model, [new_field], sql=self.sql_create_unique_null, suffix="_uniq"
449 | )
450 | )
451 | else:
452 | self.execute(self._create_unique_sql(model, [new_field.column]))
453 | # Added an index?
454 | # constraint will no longer be used in lieu of an index. The following
455 | # lines from the truth table show all True cases; the rest are False:
456 | #
457 | # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
458 | # ------------------------------------------------------------------------------
459 | # False | False | True | False
460 | # False | True | True | False
461 | # True | True | True | False
462 | if (not old_field.db_index or old_field.unique) and new_field.db_index and not new_field.unique:
463 | self.execute(self._create_index_sql(model, [new_field]))
464 |
465 | # Restore indexes & unique constraints deleted above, SQL Server requires explicit restoration
466 | if (old_type != new_type or (old_field.null and not new_field.null)) and (
467 | old_field.column == new_field.column
468 | ):
469 | # Restore unique constraints
470 | # Note: if nullable they are implemented via an explicit filtered UNIQUE INDEX (not CONSTRAINT)
471 | # in order to get ANSI-compliant NULL behaviour (i.e. NULL != NULL, multiple are allowed)
472 | if old_field.unique and new_field.unique:
473 | if new_field.null:
474 | self.execute(
475 | self._create_index_sql(
476 | model, [old_field], sql=self.sql_create_unique_null, suffix="_uniq"
477 | )
478 | )
479 | else:
480 | self.execute(self._create_unique_sql(model, columns=[old_field.column]))
481 | else:
482 | for fields in model._meta.unique_together:
483 | columns = [model._meta.get_field(field).column for field in fields]
484 | if old_field.column in columns:
485 | condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns])
486 | self.execute(self._create_unique_sql(model, columns, condition=condition))
487 | # Restore indexes
488 | index_columns = []
489 | if old_field.db_index and new_field.db_index:
490 | index_columns.append([old_field])
491 | else:
492 | for fields in model._meta.index_together:
493 | columns = [model._meta.get_field(field) for field in fields]
494 | if old_field.column in [c.column for c in columns]:
495 | index_columns.append(columns)
496 | if index_columns:
497 | for columns in index_columns:
498 | self.execute(self._create_index_sql(model, columns, suffix='_idx'))
499 | # Type alteration on primary key? Then we need to alter the column
500 | # referring to us.
501 | rels_to_update = []
502 | if old_field.primary_key and new_field.primary_key and old_type != new_type:
503 | rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
504 | # Changed to become primary key?
505 | if self._field_became_primary_key(old_field, new_field):
506 | # Make the new one
507 | self.execute(
508 | self.sql_create_pk % {
509 | "table": self.quote_name(model._meta.db_table),
510 | "name": self.quote_name(
511 | self._create_index_name(model._meta.db_table, [new_field.column], suffix="_pk")
512 | ),
513 | "columns": self.quote_name(new_field.column),
514 | }
515 | )
516 | # Update all referencing columns
517 | rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
518 | # Handle our type alters on the other end of rels from the PK stuff above
519 | for old_rel, new_rel in rels_to_update:
520 | rel_db_params = new_rel.field.db_parameters(connection=self.connection)
521 | rel_type = rel_db_params['type']
522 | fragment, other_actions = self._alter_column_type_sql(
523 | new_rel.related_model, old_rel.field, new_rel.field, rel_type
524 | )
525 | self.execute(
526 | self.sql_alter_column % {
527 | "table": self.quote_name(new_rel.related_model._meta.db_table),
528 | "changes": fragment[0],
529 | },
530 | fragment[1],
531 | )
532 | for sql, params in other_actions:
533 | self.execute(sql, params)
534 | # Does it have a foreign key?
535 | if (new_field.remote_field and
536 | (fks_dropped or not old_field.remote_field or not old_field.db_constraint) and
537 | new_field.db_constraint):
538 | self.execute(self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s"))
539 | # Rebuild FKs that pointed to us if we previously had to drop them
540 | if drop_foreign_keys:
541 | for rel in new_field.model._meta.related_objects:
542 | if _is_relevant_relation(rel, new_field) and rel.field.db_constraint:
543 | self.execute(self._create_fk_sql(rel.related_model, rel.field, "_fk"))
544 | # Does it have check constraints we need to add?
545 | if (old_db_params['check'] != new_db_params['check'] and new_db_params['check']) or (
546 | # SQL Server requires explicit creation after altering column type with the same constraint
547 | old_db_params['check'] == new_db_params['check'] and new_db_params['check'] and
548 | old_db_params['type'] != new_db_params['type']
549 | ):
550 | self.execute(
551 | self.sql_create_check % {
552 | "table": self.quote_name(model._meta.db_table),
553 | "name": self.quote_name(
554 | self._create_index_name(model._meta.db_table, [new_field.column], suffix="_check")
555 | ),
556 | "column": self.quote_name(new_field.column),
557 | "check": new_db_params['check'],
558 | }
559 | )
560 | # Drop the default if we need to
561 | # (Django usually does not use in-database defaults)
562 | if needs_database_default:
563 | changes_sql, params = self._alter_column_default_sql(model, old_field, new_field, drop=True)
564 | sql = self.sql_alter_column % {
565 | "table": self.quote_name(model._meta.db_table),
566 | "changes": changes_sql,
567 | }
568 | self.execute(sql, params)
569 |
570 | # Reset connection if required
571 | if self.connection.features.connection_persists_old_columns:
572 | self.connection.close()
573 |
574 | def _delete_indexes(self, model, old_field, new_field):
575 | index_columns = []
576 | if old_field.db_index and new_field.db_index:
577 | index_columns.append([old_field.column])
578 | for fields in model._meta.index_together:
579 | columns = [model._meta.get_field(field).column for field in fields]
580 | if old_field.column in columns:
581 | index_columns.append(columns)
582 |
583 | for fields in model._meta.unique_together:
584 | columns = [model._meta.get_field(field).column for field in fields]
585 | if old_field.column in columns:
586 | index_columns.append(columns)
587 | if index_columns:
588 | for columns in index_columns:
589 | index_names = self._constraint_names(model, columns, index=True)
590 | for index_name in index_names:
591 | self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
592 |
593 | def _delete_unique_constraints(self, model, old_field, new_field, strict=False):
594 | unique_columns = []
595 | if old_field.unique and new_field.unique:
596 | unique_columns.append([old_field.column])
597 | if unique_columns:
598 | for columns in unique_columns:
599 | constraint_names_normal = self._constraint_names(model, columns, unique=True, index=False)
600 | constraint_names_index = self._constraint_names(model, columns, unique=True, index=True)
601 | constraint_names = constraint_names_normal + constraint_names_index
602 | if strict and len(constraint_names) != 1:
603 | raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
604 | len(constraint_names),
605 | model._meta.db_table,
606 | old_field.column,
607 | ))
608 | for constraint_name in constraint_names_normal:
609 | self.execute(self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name))
610 |             # Unique indexes that are not table constraints must be deleted with
611 |             # sql_delete_index rather than sql_delete_unique (e.g. ANSI-style unique indexes on nullable columns).
612 | for index_name in constraint_names_index:
613 | self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
614 |
615 | def _rename_field_sql(self, table, old_field, new_field, new_type):
616 | new_type = self._set_field_new_type_null_status(old_field, new_type)
617 | return super()._rename_field_sql(table, old_field, new_field, new_type)
618 |
619 | def _set_field_new_type_null_status(self, field, new_type):
620 | """
621 | Keep the null property of the old field. If it has changed, it will be
622 | handled separately.
623 | """
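        # e.g. "nvarchar(20)" becomes "nvarchar(20) NULL" for a nullable field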
624 | if field.null:
625 | new_type += " NULL"
626 | else:
627 | new_type += " NOT NULL"
628 | return new_type
629 |
630 | def add_field(self, model, field):
631 | """
632 | Create a field on a model. Usually involves adding a column, but may
633 | involve adding a table instead (for M2M fields).
634 | """
635 | # Special-case implicit M2M tables
636 | if field.many_to_many and field.remote_field.through._meta.auto_created:
637 | return self.create_model(field.remote_field.through)
638 | # Get the column's definition
639 | definition, params = self.column_sql(model, field, include_default=True)
640 | # It might not actually have a column behind it
641 | if definition is None:
642 | return
643 |
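        # A plain UNIQUE constraint on SQL Server treats NULL as a regular value
        # and so allows at most one NULL row. For ANSI semantics (any number of
        # NULLs), the inline UNIQUE is stripped and uniqueness is instead enforced
        # through the backend's sql_create_unique_null template, which builds a
        # unique index filtered to non-NULL rows.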
644 | if (self.connection.features.supports_nullable_unique_constraints and
645 | not field.many_to_many and field.null and field.unique):
646 |
647 | definition = definition.replace(' UNIQUE', '')
648 | self.deferred_sql.append(self._create_index_sql(
649 | model, [field], sql=self.sql_create_unique_null, suffix="_uniq"
650 | ))
651 |
652 | # Check constraints can go on the column SQL here
653 | db_params = field.db_parameters(connection=self.connection)
654 | if db_params['check']:
655 | definition += " CHECK (%s)" % db_params['check']
656 | # Build the SQL and run it
657 | sql = self.sql_create_column % {
658 | "table": self.quote_name(model._meta.db_table),
659 | "column": self.quote_name(field.column),
660 | "definition": definition,
661 | }
662 | self.execute(sql, params)
663 | # Drop the default if we need to
664 | # (Django usually does not use in-database defaults)
665 | if not self.skip_default(field) and self.effective_default(field) is not None:
666 | changes_sql, params = self._alter_column_default_sql(model, None, field, drop=True)
667 | sql = self.sql_alter_column % {
668 | "table": self.quote_name(model._meta.db_table),
669 | "changes": changes_sql,
670 | }
671 | self.execute(sql, params)
672 | # Add an index, if required
673 | self.deferred_sql.extend(self._field_indexes_sql(model, field))
674 | # Add any FK constraints later
675 | if field.remote_field and self.connection.features.supports_foreign_keys and field.db_constraint:
676 | self.deferred_sql.append(self._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s"))
677 | # Reset connection if required
678 | if self.connection.features.connection_persists_old_columns:
679 | self.connection.close()
680 |
681 | def _create_unique_sql(self, model, columns, name=None, condition=None, deferrable=None):
682 | if (deferrable and not getattr(self.connection.features, 'supports_deferrable_unique_constraints', False)):
683 | return None
684 |
685 | def create_unique_name(*args, **kwargs):
686 | return self.quote_name(self._create_index_name(*args, **kwargs))
687 |
688 | table = Table(model._meta.db_table, self.quote_name)
689 | if name is None:
690 | name = IndexName(model._meta.db_table, columns, '_uniq', create_unique_name)
691 | else:
692 | name = self.quote_name(name)
693 | columns = Columns(table, columns, self.quote_name)
694 | statement_args = {
695 | "deferrable": self._deferrable_constraint_sql(deferrable)
696 | } if django.VERSION >= (3, 1) else {}
697 |
698 | if condition:
699 | return Statement(
700 | self.sql_create_unique_index,
701 | table=table,
702 | name=name,
703 | columns=columns,
704 | condition=' WHERE ' + condition,
705 | **statement_args
706 | ) if self.connection.features.supports_partial_indexes else None
707 | else:
708 | return Statement(
709 | self.sql_create_unique,
710 | table=table,
711 | name=name,
712 | columns=columns,
713 | **statement_args
714 | )
715 |
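    # When a condition is given, the unique constraint is emitted as a filtered
    # unique index instead; e.g. (illustrative names, assuming the backend's
    # sql_create_unique_index template):
    #   CREATE UNIQUE INDEX [t_a_b_uniq] ON [t] ([a], [b])
    #       WHERE [a] IS NOT NULL AND [b] IS NOT NULL
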
716 | def _create_index_sql(self, model, fields, *, name=None, suffix='', using='',
717 | db_tablespace=None, col_suffixes=(), sql=None, opclasses=(),
718 | condition=None):
719 | """
720 | Return the SQL statement to create the index for one or several fields.
721 | `sql` can be specified if the syntax differs from the standard (GIS
722 | indexes, ...).
723 | """
724 | tablespace_sql = self._get_index_tablespace_sql(model, fields, db_tablespace=db_tablespace)
725 | columns = [field.column for field in fields]
726 | sql_create_index = sql or self.sql_create_index
727 | table = model._meta.db_table
728 |
729 | def create_index_name(*args, **kwargs):
730 | nonlocal name
731 | if name is None:
732 | name = self._create_index_name(*args, **kwargs)
733 | return self.quote_name(name)
734 |
735 | return Statement(
736 | sql_create_index,
737 | table=Table(table, self.quote_name),
738 | name=IndexName(table, columns, suffix, create_index_name),
739 | using=using,
740 | columns=self._index_columns(table, columns, col_suffixes, opclasses),
741 | extra=tablespace_sql,
742 | condition=(' WHERE ' + condition) if condition else '',
743 | )
744 |
745 | def create_model(self, model):
746 | """
747 |         Take a model and create a table for it in the database, along with
748 |         any accompanying indexes or unique constraints.
749 | """
750 | # Create column SQL, add FK deferreds if needed
751 | column_sqls = []
752 | params = []
753 | for field in model._meta.local_fields:
754 | # SQL
755 | definition, extra_params = self.column_sql(model, field)
756 | if definition is None:
757 | continue
758 |
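            # Same ANSI-NULL unique workaround as in add_field() above.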
759 | if (self.connection.features.supports_nullable_unique_constraints and
760 | not field.many_to_many and field.null and field.unique):
761 |
762 | definition = definition.replace(' UNIQUE', '')
763 | self.deferred_sql.append(self._create_index_sql(
764 | model, [field], sql=self.sql_create_unique_null, suffix="_uniq"
765 | ))
766 |
767 | # Check constraints can go on the column SQL here
768 | db_params = field.db_parameters(connection=self.connection)
769 | if db_params['check']:
770 |                 # Name the check constraint explicitly so it can be located and dropped later
771 | definition += self._sql_check_constraint % {
772 | "name": self._create_index_name(model._meta.db_table, [field.column], suffix="_check"),
773 | "check": db_params['check']
774 | }
775 | # Autoincrement SQL (for backends with inline variant)
776 | col_type_suffix = field.db_type_suffix(connection=self.connection)
777 | if col_type_suffix:
778 | definition += " %s" % col_type_suffix
779 | params.extend(extra_params)
780 | # FK
781 | if field.remote_field and field.db_constraint:
782 | to_table = field.remote_field.model._meta.db_table
783 | to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column
784 | if self.sql_create_inline_fk:
785 | definition += " " + self.sql_create_inline_fk % {
786 | "to_table": self.quote_name(to_table),
787 | "to_column": self.quote_name(to_column),
788 | }
789 | elif self.connection.features.supports_foreign_keys:
790 | self.deferred_sql.append(self._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s"))
791 | # Add the SQL to our big list
792 | column_sqls.append("%s %s" % (
793 | self.quote_name(field.column),
794 | definition,
795 | ))
796 | # Autoincrement SQL (for backends with post table definition variant)
797 | if field.get_internal_type() in ("AutoField", "BigAutoField"):
798 | autoinc_sql = self.connection.ops.autoinc_sql(model._meta.db_table, field.column)
799 | if autoinc_sql:
800 | self.deferred_sql.extend(autoinc_sql)
801 |
802 | # Add any unique_togethers (always deferred, as some fields might be
803 | # created afterwards, like geometry fields with some backends)
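        # The IS NOT NULL condition below keeps ANSI semantics on SQL Server:
        # rows with a NULL in any of the columns stay out of the unique index.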
804 | for fields in model._meta.unique_together:
805 | columns = [model._meta.get_field(field).column for field in fields]
806 | condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns])
807 | self.deferred_sql.append(self._create_unique_sql(model, columns, condition=condition))
808 |
809 | # Make the table
810 | sql = self.sql_create_table % {
811 | "table": self.quote_name(model._meta.db_table),
812 | "definition": ", ".join(column_sqls)
813 | }
814 | if model._meta.db_tablespace:
815 | tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace)
816 | if tablespace_sql:
817 | sql += ' ' + tablespace_sql
818 |         # Prevent passing [] as params, in case a literal '%' is used in the definition
819 | self.execute(sql, params or None)
820 |
821 |         # Add any field indexes and index_together indexes (deferred, as SQLite's _remake_table needs it)
822 | self.deferred_sql.extend(self._model_indexes_sql(model))
823 | self.deferred_sql = list(set(self.deferred_sql))
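        # (the set() round-trip drops duplicate deferred statements, but does not
        # preserve their original ordering)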
824 |
825 | # Make M2M tables
826 | for field in model._meta.local_many_to_many:
827 | if field.remote_field.through._meta.auto_created:
828 | self.create_model(field.remote_field.through)
829 |
830 | def delete_model(self, model):
831 | """
832 |         Delete a model from the database.
833 | """
834 | # Delete the foreign key constraints
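        # Incoming FKs from other tables must be dropped first, or DROP TABLE
        # fails. Each is removed via ALTER TABLE ... DROP CONSTRAINT; the
        # sql_alter_column_no_default template is reused here because, on this
        # backend, it renders to a bare DROP CONSTRAINT clause.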
835 | result = self.execute(
836 | self._sql_select_foreign_key_constraints % {
837 | "table": self.quote_value(model._meta.db_table),
838 | },
839 | has_result=True
840 | )
841 | if result:
842 | for table, constraint in result:
843 | sql = self.sql_alter_column % {
844 | "table": self.quote_name(table),
845 | "changes": self.sql_alter_column_no_default % {
846 | "column": self.quote_name(constraint),
847 | }
848 | }
849 | self.execute(sql)
850 |
851 | # Delete the table
852 | super().delete_model(model)
853 | # Remove all deferred statements referencing the deleted table.
854 | for sql in list(self.deferred_sql):
855 | if isinstance(sql, Statement) and sql.references_table(model._meta.db_table):
856 | self.deferred_sql.remove(sql)
857 |
858 | def execute(self, sql, params=(), has_result=False):
859 | """
860 |         Execute the given SQL statement, with optional parameters.
861 | """
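        # `has_result` is an extension over Django's base implementation: it lets
        # DDL-time SELECTs (see delete_model() above) fetch rows through the same
        # logging and collect_sql machinery.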
862 | result = None
863 | # Don't perform the transactional DDL check if SQL is being collected
864 | # as it's not going to be executed anyway.
865 | if not self.collect_sql and self.connection.in_atomic_block and not self.connection.features.can_rollback_ddl:
866 | raise TransactionManagementError(
867 | "Executing DDL statements while in a transaction on databases "
868 | "that can't perform a rollback is prohibited."
869 | )
870 | # Account for non-string statement objects.
871 | sql = str(sql)
872 | # Log the command we're running, then run it
873 | logger.debug("%s; (params %r)", sql, params, extra={'params': params, 'sql': sql})
874 | if self.collect_sql:
875 | ending = "" if sql.endswith(";") else ";"
876 | if params is not None:
877 | self.collected_sql.append((sql % tuple(map(self.quote_value, params))) + ending)
878 | else:
879 | self.collected_sql.append(sql + ending)
880 | else:
881 | cursor = self.connection.cursor()
882 | cursor.execute(sql, params)
883 | if has_result:
884 | result = cursor.fetchall()
885 |             # The cursor can be closed only when the driver supports MARS
886 |             # (multiple active cursors on one connection), because the migration
887 |             # command has already opened a cursor outside this method.
888 |             if self.connection.supports_mars:
889 |                 cursor.close()
890 | return result
891 |
892 | def prepare_default(self, value):
893 | return self.quote_value(value)
894 |
895 | def quote_value(self, value):
896 | """
897 |         Return a quoted version of the value so it's safe to use in an SQL
898 | string. This is not safe against injection from user code; it is
899 | intended only for use in making SQL scripts or preparing default values
900 | for particularly tricky backends (defaults are not user-defined, though,
901 | so this is safe).
902 | """
903 | if isinstance(value, (datetime.datetime, datetime.date, datetime.time)):
904 | return "'%s'" % value
905 | elif isinstance(value, str):
906 | return "'%s'" % value.replace("'", "''")
907 | elif isinstance(value, (bytes, bytearray, memoryview)):
908 | return "0x%s" % force_str(binascii.hexlify(value))
909 | elif isinstance(value, bool):
910 | return "1" if value else "0"
911 | else:
912 | return str(value)
913 |
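    # Illustrative round-trips through the branches above:
    #   quote_value("O'Brien")                  -> "'O''Brien'"
    #   quote_value(b"\x01\xff")                -> "0x01ff"
    #   quote_value(True)                       -> "1"
    #   quote_value(datetime.date(2020, 1, 2)) -> "'2020-01-02'"
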
914 | def remove_field(self, model, field):
915 | """
916 |         Remove a field from a model. Usually involves deleting a column,
917 | but for M2Ms may involve deleting a table.
918 | """
919 | # Special-case implicit M2M tables
920 | if field.many_to_many and field.remote_field.through._meta.auto_created:
921 | return self.delete_model(field.remote_field.through)
922 | # It might not actually have a column behind it
923 | if field.db_parameters(connection=self.connection)['type'] is None:
924 | return
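        # SQL Server will not drop a column while any constraint or index still
        # references it, so each FK, index, PK, check and unique constraint on
        # the column is dropped explicitly before the column itself.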
925 | # Drop any FK constraints, SQL Server requires explicit deletion
926 | with self.connection.cursor() as cursor:
927 | constraints = self.connection.introspection.get_constraints(cursor, model._meta.db_table)
928 | for name, infodict in constraints.items():
929 | if field.column in infodict['columns'] and infodict['foreign_key']:
930 | self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, name))
931 | # Drop any indexes, SQL Server requires explicit deletion
932 | for name, infodict in constraints.items():
933 | if field.column in infodict['columns'] and infodict['index']:
934 | self.execute(self.sql_delete_index % {
935 | "table": self.quote_name(model._meta.db_table),
936 | "name": self.quote_name(name),
937 | })
938 | # Drop primary key constraint, SQL Server requires explicit deletion
939 | for name, infodict in constraints.items():
940 | if field.column in infodict['columns'] and infodict['primary_key']:
941 | self.execute(self.sql_delete_pk % {
942 | "table": self.quote_name(model._meta.db_table),
943 | "name": self.quote_name(name),
944 | })
945 | # Drop check constraints, SQL Server requires explicit deletion
946 | for name, infodict in constraints.items():
947 | if field.column in infodict['columns'] and infodict['check']:
948 | self.execute(self.sql_delete_check % {
949 | "table": self.quote_name(model._meta.db_table),
950 | "name": self.quote_name(name),
951 | })
952 | # Drop unique constraints, SQL Server requires explicit deletion
953 | for name, infodict in constraints.items():
954 | if (field.column in infodict['columns'] and infodict['unique'] and
955 | not infodict['primary_key'] and not infodict['index']):
956 | self.execute(self.sql_delete_unique % {
957 | "table": self.quote_name(model._meta.db_table),
958 | "name": self.quote_name(name),
959 | })
960 | # Delete the column
961 | sql = self.sql_delete_column % {
962 | "table": self.quote_name(model._meta.db_table),
963 | "column": self.quote_name(field.column),
964 | }
965 | self.execute(sql)
966 | # Reset connection if required
967 | if self.connection.features.connection_persists_old_columns:
968 | self.connection.close()
969 | # Remove all deferred statements referencing the deleted column.
970 | for sql in list(self.deferred_sql):
971 | if isinstance(sql, Statement) and sql.references_column(model._meta.db_table, field.column):
972 | self.deferred_sql.remove(sql)
973 |
--------------------------------------------------------------------------------