├── tests
│   ├── testapp
│   │   ├── __init__.py
│   │   ├── migrations
│   │   │   ├── __init__.py
│   │   │   └── 0001_initial.py
│   │   └── models.py
│   ├── docker-compose.yml
│   ├── settings.py
│   ├── conftest.py
│   ├── test_redshift_backend.py
│   ├── test_inspectdb.py
│   └── test_base.py
├── examples
│   ├── proj1
│   │   ├── config
│   │   │   ├── __init__.py
│   │   │   ├── asgi.py
│   │   │   ├── wsgi.py
│   │   │   ├── urls.py
│   │   │   └── settings.py
│   │   ├── testapp
│   │   │   ├── __init__.py
│   │   │   └── models.py
│   │   ├── requirements.txt
│   │   ├── .env.psql
│   │   ├── .env.sample
│   │   ├── docker-compose.yml
│   │   └── manage.py
│   └── dj-sql-explorer
│       ├── config
│       │   ├── __init__.py
│       │   ├── asgi.py
│       │   ├── wsgi.py
│       │   ├── urls.py
│       │   └── settings.py
│       ├── testapp
│       │   ├── __init__.py
│       │   └── models.py
│       ├── requirements.txt
│       ├── .env.sample
│       └── manage.py
├── django_redshift_backend
│   ├── _vendor
│   │   ├── __init__.py
│   │   └── django40
│   │       ├── __init__.py
│   │       └── db
│   │           ├── __init__.py
│   │           └── backends
│   │               ├── __init__.py
│   │               ├── base
│   │               │   ├── __init__.py
│   │               │   ├── client.py
│   │               │   ├── validation.py
│   │               │   └── introspection.py
│   │               ├── postgresql
│   │               │   ├── __init__.py
│   │               │   ├── client.py
│   │               │   ├── features.py
│   │               │   ├── creation.py
│   │               │   ├── introspection.py
│   │               │   ├── schema.py
│   │               │   ├── operations.py
│   │               │   └── base.py
│   │               ├── ddl_references.py
│   │               └── utils.py
│   ├── distkey.py
│   ├── __init__.py
│   ├── psycopg2adapter.py
│   └── meta.py
├── doc
│   ├── authors.rst
│   ├── requirements.txt
│   ├── changes.rst
│   ├── Makefile
│   ├── conf.py
│   ├── make.bat
│   ├── basic.rst
│   ├── index.rst
│   ├── dev.rst
│   ├── CODE_OF_CONDUCT.rst
│   ├── design.rst
│   └── refs.rst
├── .coveragerc
├── .devcontainer
│   ├── on_create_command.sh
│   └── devcontainer.json
├── .readthedocs.yml
├── .gitignore
├── AUTHORS.rst
├── .github
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── workflows
│   │   ├── lint.yml
│   │   ├── release.yml
│   │   ├── test-examples-proj1.yml
│   │   └── test.yml
│   ├── dependabot.yml
│   └── ISSUE_TEMPLATE.md
├── CONTRIBUTING.rst
├── checklist.rst
├── tox.ini
├── README.rst
├── pyproject.toml
├── CODE_OF_CONDUCT.md
├── CHANGES.rst
└── LICENSE

--------------------------------------------------------------------------------
/tests/testapp/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/examples/proj1/config/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/examples/proj1/testapp/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/tests/testapp/migrations/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/django_redshift_backend/_vendor/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/examples/dj-sql-explorer/config/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/examples/dj-sql-explorer/testapp/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/django_redshift_backend/_vendor/django40/__init__.py:
--------------------------------------------------------------------------------

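Example (added commentary, not a repository file): the ``_vendor/django40`` tree above mirrors Django 4.0's own package layout, so the vendored modules import one another by swapping the ``django.`` prefix for the vendored path. This exact import appears in the vendored postgresql client further down in this dump:

    # vendored-module import path, as used in
    # django_redshift_backend/_vendor/django40/db/backends/postgresql/client.py
    from django_redshift_backend._vendor.django40.db.backends.base.client import BaseDatabaseClient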
--------------------------------------------------------------------------------
/doc/authors.rst:
--------------------------------------------------------------------------------
.. include:: ../AUTHORS.rst

--------------------------------------------------------------------------------
/doc/requirements.txt:
--------------------------------------------------------------------------------
sphinx
sphinx_rtd_theme
--------------------------------------------------------------------------------
/django_redshift_backend/_vendor/django40/db/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/doc/changes.rst:
--------------------------------------------------------------------------------
.. include:: ../CHANGES.rst

--------------------------------------------------------------------------------
/django_redshift_backend/_vendor/django40/db/backends/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
[run]
branch = 1
source = django_redshift_backend
--------------------------------------------------------------------------------
/django_redshift_backend/_vendor/django40/db/backends/base/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/django_redshift_backend/_vendor/django40/db/backends/postgresql/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/examples/proj1/requirements.txt:
--------------------------------------------------------------------------------
-e ../..[psycopg2-binary]
django-environ==0.8.1
--------------------------------------------------------------------------------
/examples/proj1/.env.psql:
--------------------------------------------------------------------------------
DATABASE_URL=redshift://test:test@localhost:5432/test
SECRET_KEY=django-insecure-key
--------------------------------------------------------------------------------
/examples/dj-sql-explorer/requirements.txt:
--------------------------------------------------------------------------------
-e ../..[psycopg2-binary]
django-environ==0.8.1
django-sql-explorer
python-dateutil>=2.9
--------------------------------------------------------------------------------
/django_redshift_backend/distkey.py:
--------------------------------------------------------------------------------
# flake8: noqa
# for backward compatibility before django-redshift-backend-3.0.0
from .meta import DistKey
--------------------------------------------------------------------------------
/examples/proj1/.env.sample:
--------------------------------------------------------------------------------
DATABASE_URL=redshift://user:password@...redshift.amazonaws.com:5439/?DISABLE_SERVER_SIDE_CURSORS=True
SECRET_KEY=django-insecure-key
--------------------------------------------------------------------------------
/examples/dj-sql-explorer/.env.sample:
--------------------------------------------------------------------------------
DATABASE_URL=redshift://user:password@...redshift.amazonaws.com:5439/?DISABLE_SERVER_SIDE_CURSORS=True
SECRET_KEY=django-insecure-key
DEBUG=True
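Example (added commentary, not a repository file): the DATABASE_URL values in these .env files are parsed by django-environ, whose redshift:// scheme selects this package as the database ENGINE. A minimal sketch mirroring tests/settings.py (the URL value is illustrative):

    import environ

    env = environ.Env()
    # With DATABASE_URL=redshift://user:password@host:5439/dbname in the
    # environment, env.db() returns a DATABASES entry whose ENGINE is
    # "django_redshift_backend" (django-environ's mapping for redshift://).
    DATABASES = {"default": env.db()}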
--------------------------------------------------------------------------------
/.devcontainer/on_create_command.sh:
--------------------------------------------------------------------------------
# setup
set -ex

curl -LsSf https://astral.sh/uv/install.sh | sh
. $HOME/.cargo/env
uv tool install -U ruff
uv tool install -U tox --with tox-uv
uv sync --extra psycopg2-binary
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
version: "2"

build:
  os: "ubuntu-22.04"
  tools:
    python: "3.10"

python:
  install:
    - requirements: doc/requirements.txt

sphinx:
  configuration: doc/conf.py
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
build/
dist/
*.egg-info
*.pyc
.eggs
.venv
.cache
.tox
.pytest_cache/
doc/_build/
.coverage
coverage.xml
**/.env
examples/**/migrations/*
.idea/
uv.lock
Pipfile
--------------------------------------------------------------------------------
/AUTHORS.rst:
--------------------------------------------------------------------------------
=======
AUTHORS
=======

* Takayuki Shimizukawa
* Kosei Kitahara
* Evandro Myller
* Maxime Vdb

--------------------------------------------------------------------------------
/examples/proj1/docker-compose.yml:
--------------------------------------------------------------------------------
version: '3.1'

services:
  db:
    image: postgres:9.6-alpine
    restart: always
    environment:
      POSTGRES_USER: test
      POSTGRES_PASSWORD: test
      POSTGRES_DB: test
    ports:
      - 5432:5432
--------------------------------------------------------------------------------
/tests/docker-compose.yml:
--------------------------------------------------------------------------------
version: '3.1'

services:
  db:
    image: postgres:9.6-alpine
    restart: always
    environment:
      POSTGRES_USER: user
      POSTGRES_PASSWORD: password
      POSTGRES_DB: testing
    ports:
      - 5439:5432
--------------------------------------------------------------------------------
/django_redshift_backend/__init__.py:
--------------------------------------------------------------------------------
from .meta import DistKey, SortKey  # noqa

# py38 or later
from importlib.metadata import version, PackageNotFoundError

try:
    __version__ = version("django-redshift-backend")
except PackageNotFoundError:
    # package is not installed
    pass
--------------------------------------------------------------------------------
/django_redshift_backend/psycopg2adapter.py:
--------------------------------------------------------------------------------
from codecs import encode

from psycopg2.extensions import Binary


class RedshiftBinary(Binary):
    def getquoted(self) -> bytes:
        # Redshift has no bytea type, so quote binary parameters as a hex
        # string passed through to_varbyte() and cast to VARBYTE instead.
        hex_encoded = encode(self.adapted, "hex_codec")
        statement = b"to_varbyte('%s', 'hex')::varbyte" % hex_encoded
        return statement
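Example (added commentary, not a repository file): a quick illustration of the quoting above. The explicit registration line is an assumption shown only to make the example self-contained; the backend wires the adapter up itself:

    from psycopg2.extensions import register_adapter

    from django_redshift_backend.psycopg2adapter import RedshiftBinary

    # hypothetical explicit registration, for demonstration only
    register_adapter(bytes, RedshiftBinary)

    # encode(b"\x00\xff", "hex_codec") == b"00ff", so the quoted literal becomes:
    assert RedshiftBinary(b"\x00\xff").getquoted() == b"to_varbyte('00ff', 'hex')::varbyte"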
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
Subject:

### Feature or Bugfix

- Feature
- Bugfix

### Purpose
-
-

### Detail
-
-

### Relates
-

--------------------------------------------------------------------------------
/tests/settings.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
import os
import environ

if uri := os.environ.get("TEST_WITH_REDSHIFT"):
    # use the URI if it has at least one character.
    os.environ["DATABASE_URL"] = uri
else:
    os.environ["DATABASE_URL"] = "redshift://user:password@localhost:5439/testing"
env = environ.Env()

DATABASES = {
    'default': env.db()
}

SECRET_KEY = ''
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
.. image:: https://jazzband.co/static/img/jazzband.svg
   :target: https://jazzband.co/
   :alt: Jazzband

This is a Jazzband_ project. By contributing you agree to abide by the `Contributor Code of Conduct`_ and follow the guidelines_.

.. _Jazzband: https://jazzband.co/
.. _Contributor Code of Conduct: https://jazzband.co/about/conduct
.. _guidelines: https://jazzband.co/about/guidelines

--------------------------------------------------------------------------------
/examples/proj1/config/asgi.py:
--------------------------------------------------------------------------------
"""
ASGI config for proj1 project.

It exposes the ASGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""

import os

from django.core.asgi import get_asgi_application

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj1.settings')

application = get_asgi_application()
--------------------------------------------------------------------------------
/examples/proj1/config/wsgi.py:
--------------------------------------------------------------------------------
"""
WSGI config for proj1 project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""

import os

from django.core.wsgi import get_wsgi_application

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj1.settings')

application = get_wsgi_application()
--------------------------------------------------------------------------------
/examples/dj-sql-explorer/config/asgi.py:
--------------------------------------------------------------------------------
"""
ASGI config for proj1 project.

It exposes the ASGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""

import os

from django.core.asgi import get_asgi_application

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj1.settings')

application = get_asgi_application()
--------------------------------------------------------------------------------
/examples/dj-sql-explorer/config/wsgi.py:
--------------------------------------------------------------------------------
"""
WSGI config for proj1 project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""

import os

from django.core.wsgi import get_wsgi_application

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj1.settings')

application = get_wsgi_application()
--------------------------------------------------------------------------------
/.github/workflows/lint.yml:
--------------------------------------------------------------------------------
name: Lint

on: [push, pull_request]

jobs:
  lint:
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        linter:
          - lint
          - check

    steps:
      - uses: actions/checkout@v6

      - uses: actions/setup-python@v6
        with:
          python-version: "3.10"

      - name: Install dependencies
        run: |
          python -m pip install -U tox-uv

      - run: tox -e ${{ matrix.linter }}
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "monthly"
    groups:
      # Name for the group, which will be used in PR titles and branch names
      all-github-actions:
        # Group all updates together
        patterns:
          - "*"
  - package-ecosystem: "devcontainers"
    directory: "/"
    schedule:
      interval: "monthly"
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
Subject:

### Problem
-

#### Procedure to reproduce the problem
```

```

#### Error logs / results
```

```
-

#### Expected results


### Environment info
- OS:
- Python version:
- Django version:
- Django-Redshift-Backend version:
--------------------------------------------------------------------------------
/checklist.rst:
--------------------------------------------------------------------------------
.. release procedure

Procedure:

1. check the CI test results: https://github.com/jazzband/django-redshift-backend/actions?query=workflow%3ATest
2. update the release version/date in ``CHANGES.rst``
3. create a GitHub release, tagging it with a version name that MUST follow semver. e.g.: ``git tag 1.0.1``
4. publish the GitHub release to invoke the release process in GitHub Actions.
5. approve the release files; check your email or https://jazzband.co/projects/django-redshift-backend
6. check the PyPI page: https://pypi.org/p/django-redshift-backend
7. bump the version in ``CHANGES.rst`` and commit/push it onto GitHub

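Example (added commentary, not a repository file): for steps 3-4 above, tagging and publishing typically look like this; the version number is illustrative:

    git tag 1.0.1          # MUST follow semver
    git push origin 1.0.1  # pushing the tag triggers .github/workflows/release.yml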
--------------------------------------------------------------------------------
/doc/Makefile:
--------------------------------------------------------------------------------
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
SPHINXPROJ    = DjangoRedshiftBackend
SOURCEDIR     = .
BUILDDIR      = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/examples/proj1/manage.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys


def main():
    """Run administrative tasks."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
--------------------------------------------------------------------------------
/examples/dj-sql-explorer/manage.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys


def main():
    """Run administrative tasks."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
--------------------------------------------------------------------------------
/doc/conf.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# -- Project information -----------------------------------------------------

project = 'Django Redshift Backend'
copyright = '2018, Django Redshift Backend team'
author = 'Django Redshift Backend team'

version = release = '1.0'

# -- General configuration ---------------------------------------------------

extensions = [
]

source_suffix = '.rst'
master_doc = 'index'
language = None
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
pygments_style = 'sphinx'


# -- Options for HTML output -------------------------------------------------

html_theme = 'sphinx_rtd_theme'
# html_theme_options = {}

--------------------------------------------------------------------------------
/examples/proj1/config/urls.py:
--------------------------------------------------------------------------------
"""proj1 URL Configuration

The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
    1. Add an import: from my_app import views
    2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
    1. Add an import: from other_app.views import Home
    2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.urls import include, path
    2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path

urlpatterns = [
    path('admin/', admin.site.urls),
]
--------------------------------------------------------------------------------
/examples/dj-sql-explorer/config/urls.py:
--------------------------------------------------------------------------------
"""proj1 URL Configuration

The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
    1. Add an import: from my_app import views
    2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
    1. Add an import: from other_app.views import Home
    2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.urls import include, path
    2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include

urlpatterns = [
    path('explorer/', include('explorer.urls')),
    path('admin/', admin.site.urls),
]
--------------------------------------------------------------------------------
/doc/make.bat:
--------------------------------------------------------------------------------
@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build
set SPHINXPROJ=DjangoRedshiftBackend

if "%1" == "" goto help

%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%

:end
popd
--------------------------------------------------------------------------------
/examples/proj1/testapp/models.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

from django.db import models

from django_redshift_backend.base import DistKey, SortKey


class TestModel(models.Model):
    ctime = models.DateTimeField()
    text = models.TextField()
    uuid = models.UUIDField()


class TestReferencedModel(models.Model):
    pass


class TestModelWithMetaKeys(models.Model):
    name = models.CharField(max_length=100)
    age = models.IntegerField()
    created_at = models.DateTimeField()
    fk = models.ForeignKey(TestReferencedModel, on_delete=models.CASCADE)

    class Meta:
        indexes = [DistKey(fields=['fk'])]
        ordering = [SortKey('created_at'), SortKey('-id')]


class TestParentModel(models.Model):
    age = models.IntegerField()


class TestChildModel(models.Model):
    parent = models.ForeignKey(TestParentModel, on_delete=models.CASCADE)
    age = models.IntegerField()
--------------------------------------------------------------------------------
/examples/dj-sql-explorer/testapp/models.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

from django.db import models

from django_redshift_backend.base import DistKey, SortKey


class TestModel(models.Model):
    ctime = models.DateTimeField()
    text = models.TextField()
    uuid = models.UUIDField()


class TestReferencedModel(models.Model):
    pass


class TestModelWithMetaKeys(models.Model):
    name = models.CharField(max_length=100)
    age = models.IntegerField()
    created_at = models.DateTimeField()
    fk = models.ForeignKey(TestReferencedModel, on_delete=models.CASCADE)

    class Meta:
        indexes = [DistKey(fields=['fk'])]
        ordering = [SortKey('created_at'), SortKey('-id')]


class TestParentModel(models.Model):
    age = models.IntegerField()


class TestChildModel(models.Model):
    parent = models.ForeignKey(TestParentModel, on_delete=models.CASCADE)
    age = models.IntegerField()
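Example (added commentary, not a repository file): to see the DDL these DistKey/SortKey Meta options generate, the example projects run the following commands (taken verbatim from .github/workflows/test-examples-proj1.yml):

    python manage.py makemigrations testapp
    python manage.py sqlmigrate testapp 0001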
--------------------------------------------------------------------------------
/tests/testapp/models.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

from django.db import models

from django_redshift_backend.base import DistKey, SortKey


class TestModel(models.Model):
    ctime = models.DateTimeField()
    text = models.TextField()
    uuid = models.UUIDField()


class TestReferencedModel(models.Model):
    id = models.IntegerField()


class TestModelWithMetaKeys(models.Model):
    name = models.CharField(max_length=100)
    age = models.IntegerField()
    created_at = models.DateTimeField()
    fk = models.ForeignKey(TestReferencedModel, on_delete=models.CASCADE)

    class Meta:
        indexes = [DistKey(fields=['fk'])]
        ordering = [SortKey('created_at'), SortKey('-id')]


class TestParentModel(models.Model):
    age = models.IntegerField()


class TestChildModel(models.Model):
    parent = models.ForeignKey(TestParentModel, on_delete=models.CASCADE)
    age = models.IntegerField()
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
name: Release

on:
  push:
    tags:
      - '*'

jobs:
  build:
    if: github.repository == 'jazzband/django-redshift-backend'
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v6
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.10"

      - name: Install dependencies
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh

      - name: Build package
        run: |
          uv build
          uvx twine check dist/*

      - name: Upload packages to Jazzband
        if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
        uses: pypa/gh-action-pypi-publish@master
        with:
          user: jazzband
          password: ${{ secrets.JAZZBAND_RELEASE_KEY }}
          repository_url: https://jazzband.co/projects/django-redshift-backend/upload
--------------------------------------------------------------------------------
/django_redshift_backend/_vendor/django40/db/backends/base/client.py:
--------------------------------------------------------------------------------
import os
import subprocess


class BaseDatabaseClient:
    """Encapsulate backend-specific methods for opening a client shell."""

    # This should be a string representing the name of the executable
    # (e.g., "psql"). Subclasses must override this.
    executable_name = None

    def __init__(self, connection):
        # connection is an instance of BaseDatabaseWrapper.
        self.connection = connection

    @classmethod
    def settings_to_cmd_args_env(cls, settings_dict, parameters):
        raise NotImplementedError(
            "subclasses of BaseDatabaseClient must provide a "
            "settings_to_cmd_args_env() method or override a runshell()."
        )

    def runshell(self, parameters):
        args, env = self.settings_to_cmd_args_env(
            self.connection.settings_dict, parameters
        )
        env = {**os.environ, **env} if env else None
        subprocess.run(args, env=env, check=True)
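Example (added commentary, not a repository file): BaseDatabaseClient.runshell() above is what ``manage.py dbshell`` ultimately calls; a concrete backend only has to supply ``executable_name`` and ``settings_to_cmd_args_env()``, as the vendored postgresql client later in this dump does. A minimal, hypothetical subclass for illustration:

    from django_redshift_backend._vendor.django40.db.backends.base.client import BaseDatabaseClient


    class EchoDatabaseClient(BaseDatabaseClient):  # hypothetical, for illustration only
        executable_name = "echo"

        @classmethod
        def settings_to_cmd_args_env(cls, settings_dict, parameters):
            # runshell() would then just echo the configured database name
            return [cls.executable_name, settings_dict.get("NAME", "")], None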
--------------------------------------------------------------------------------
/django_redshift_backend/_vendor/django40/db/backends/base/validation.py:
--------------------------------------------------------------------------------
class BaseDatabaseValidation:
    """Encapsulate backend-specific validation."""

    def __init__(self, connection):
        self.connection = connection

    def check(self, **kwargs):
        return []

    def check_field(self, field, **kwargs):
        errors = []
        # Backends may implement a check_field_type() method.
        if (
            hasattr(self, "check_field_type")
            and
            # Ignore any related fields.
            not getattr(field, "remote_field", None)
        ):
            # Ignore fields with unsupported features.
            db_supports_all_required_features = all(
                getattr(self.connection.features, feature, False)
                for feature in field.model._meta.required_db_features
            )
            if db_supports_all_required_features:
                field_type = field.db_type(self.connection)
                # Ignore non-concrete fields.
                if field_type is not None:
                    errors.extend(self.check_field_type(field, field_type))
        return errors
--------------------------------------------------------------------------------
/doc/basic.rst:
--------------------------------------------------------------------------------
=====
Basic
=====

Installation
============

Please install django-redshift-backend using pip (8.1.1 or later).

.. code-block:: bash

   $ pip install django-redshift-backend

This backend requires ``psycopg2``, which may be installed from source or wheel (pre-built binaries).
If you don't want to specify it separately, you may install it using an extra:

.. code-block:: bash

   # For pre-built binary
   $ pip install django-redshift-backend[psycopg2-binary]

   # For the source distribution
   $ pip install django-redshift-backend[psycopg2]

Please refer to the `psycopg2 documentation`_ for more details on the topic.

.. _psycopg2 documentation: https://www.psycopg.org/docs/install.html#psycopg-vs-psycopg-binary

Django settings
===============

ENGINE for DATABASES is 'django_redshift_backend'. You can set the name in your settings.py as::

    DATABASES = {
        'default': {
            'ENGINE': 'django_redshift_backend',
            'NAME': '',
            'USER': '',
            'PASSWORD': '',
            'HOST': '',
            'PORT': '5439',
        }
    }

For more information, please refer to :doc:`refs`.

--------------------------------------------------------------------------------
/django_redshift_backend/meta.py:
--------------------------------------------------------------------------------
from django.db.models import Index


class DistKey(Index):
    """A single-field index denoting the distkey for a model.

    Use as follows:

    class MyModel(models.Model):
        ...

        class Meta:
            indexes = [DistKey(fields=['customer_id'])]
    """

    def deconstruct(self):
        path, expressions, kwargs = super().deconstruct()
        path = path.replace("django_redshift_backend.meta", "django_redshift_backend")
        return (path, expressions, kwargs)


class SortKey(str):
    """A SORTKEY in Redshift, also valid as ordering in Django.

    https://docs.djangoproject.com/en/dev/ref/models/options/#django.db.models.Options.ordering

    Use as follows:

    class MyModel(models.Model):
        ...

        class Meta:
            ordering = [SortKey('created_at'), SortKey('-id')]
    """

    def __hash__(self):
        return hash(str(self))

    def deconstruct(self):
        path = "{}.{}".format(self.__class__.__module__, self.__class__.__name__)
        path = path.replace("django_redshift_backend.meta", "django_redshift_backend")
        return (path, [str(self)], {})

    def __eq__(self, other):
        if self.__class__ == other.__class__:
            return self.deconstruct() == other.deconstruct()
        return NotImplemented
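Example (added commentary, not a repository file): on Redshift, DistKey and SortKey end up as table attributes in the generated CREATE TABLE. The sketch below for TestModelWithMetaKeys is abbreviated and illustrative -- the column types and exact clauses are assumptions, not captured backend output:

    CREATE TABLE "testapp_testmodelwithmetakeys" (
        "id" integer identity(1, 1) NOT NULL PRIMARY KEY,
        "name" varchar(100) NOT NULL,
        "age" integer NOT NULL,
        "created_at" timestamp with time zone NOT NULL,
        "fk_id" integer NOT NULL
    ) DISTSTYLE KEY DISTKEY("fk_id") SORTKEY("created_at", "id");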
--------------------------------------------------------------------------------
/.github/workflows/test-examples-proj1.yml:
--------------------------------------------------------------------------------
name: Test examples/proj1

on: [push, pull_request]

jobs:
  build:
    name: build (Python ${{ matrix.python-version }}, Django ${{ matrix.django-version }})
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      max-parallel: 5
      matrix:
        python-version: ['3.10']
        django-version: ['4.2', '5.0', '5.1']
        include:
          - django-version: 'main'
            python-version: '3.10'

    services:
      postgres:
        image: postgres:9.6-alpine
        env:
          POSTGRES_USER: test
          POSTGRES_PASSWORD: test
          POSTGRES_DB: test
        ports:
          - 5432:5432

    steps:
      - uses: actions/checkout@v6

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dependencies
        working-directory: examples/proj1
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          uv pip install --system -r requirements.txt
          uv pip install --system psycopg2-binary

      - name: tests
        working-directory: examples/proj1
        env:
          ENV_FILE: .env.psql
        run: |
          python manage.py makemigrations testapp
          python manage.py sqlmigrate testapp 0001
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
name: Test

on: [push, pull_request]

jobs:
  build:
    name: build (Python ${{ matrix.python-version }}, Django ${{ matrix.django-version }})
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      max-parallel: 5
      matrix:
        python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
        django-version: ['4.2', '5.0', '5.1']
        exclude:
          - django-version: '5.0'
            python-version: '3.9'
          - django-version: '5.1'
            python-version: '3.9'
          - django-version: 'main'
            python-version: '3.9'
          - django-version: '4.2'
            python-version: '3.13'
          - django-version: '5.0'
            python-version: '3.13'

    services:
      postgres:
        image: postgres:9.6-alpine
        env:
          POSTGRES_USER: user
          POSTGRES_PASSWORD: password
          POSTGRES_DB: testing
        ports:
          - 5439:5432

    steps:
      - uses: actions/checkout@v6

      - name: Set up Python ${{ matrix.python-version }}
        uses: astral-sh/setup-uv@v7
        with:
          cache-dependency-glob: "pyproject.toml"
          cache-suffix: ${{ matrix.python-version }}

      - name: Install Python
        run: uv python install ${{ matrix.python-version }}
        env:
          UV_PYTHON_PREFERENCE: only-managed

      - name: Tox tests
        run: uv run --only-dev tox -v
        env:
          DJANGO: ${{ matrix.django-version }}
          TEST_WITH_POSTGRES: 1
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
[tox]
envlist =
    py{39,310,311,312}-dj42
    py{310,311,312}-dj50
    py{310,311,312,313}-dj51
    lint
    check
skipsdist = True

[gh-actions]
python =
    3.9: py39
    3.10: py310, lint, check
    3.11: py311
    3.12: py312
    3.13: py313

[gh-actions:env]
DJANGO =
    4.2: dj42
    5.0: dj50
    5.1: dj51
    main: djmain

[testenv]
deps =
    .[psycopg2-binary]
    coverage
    pytest
    pytest-cov
    mock>=2.0
    django-environ
    dj42: Django>=4.2,<5.0
    dj50: Django>=5.0,<5.1
    dj51: Django>=5.1,<5.2
    djmain: https://github.com/django/django/archive/main.tar.gz
setenv =
    DJANGO_SETTINGS_MODULE = settings
    PYTHONPATH = {toxinidir}
    TEST_WITH_POSTGRES = {env:TEST_WITH_POSTGRES:}
    TEST_WITH_REDSHIFT = {env:TEST_WITH_REDSHIFT:}
pip_pre = True
commands =
    pytest -v --cov django_redshift_backend --cov-append --cov-report term-missing --cov-report=xml {posargs}
ignore_outcome =
    djmain: True
ignore_errors =
    djmain: True

[testenv:lint]
basepython = python3
deps=ruff
commands=
    ruff check django_redshift_backend
    ruff format --check django_redshift_backend

[testenv:format]
basepython = python3
deps=ruff
commands=
    ruff check --fix django_redshift_backend
    ruff format django_redshift_backend

[testenv:check]
deps = uv
commands =
    uv build --sdist --wheel
    uvx twine check dist/*

[flake8]
max-line-length=120
ignore = W504
exclude = tests/testapp/migrations
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/python
{
    "name": "Python 3",
    "image": "mcr.microsoft.com/devcontainers/python:1-3.10-bookworm",
    "features": {
        "ghcr.io/devcontainers/features/docker-in-docker:2": {},
        "ghcr.io/devcontainers/features/git:1": {},
        "ghcr.io/devcontainers/features/github-cli:1": {},
        "ghcr.io/devcontainers-contrib/features/act:1": {}
    },

    // Use 'forwardPorts' to make a list of ports inside the container available locally.
    // "forwardPorts": [],

    // Use 'postCreateCommand' to run commands after the container is created.
    // "postCreateCommand": "pip3 install --user -r requirements.txt",

    "onCreateCommand": "bash .devcontainer/on_create_command.sh",

    // Configure tool-specific properties.
    // "customizations": {},
    "customizations": {
        // Configure properties specific to VS Code.
        "vscode": {
            // Add the IDs of extensions you want installed when the container is created.
            "extensions": [
                "ms-python.python",
                "charliermarsh.ruff"
            ],
            // Set *default* container specific settings.json values on container create.
            "settings": {
                "editor.codeActionsOnSave": {
                    "source.fixAll": "explicit"
                },
                "[python]": {
                    "editor.defaultFormatter": "charliermarsh.ruff"
                },
                "python.defaultInterpreterPath": "/usr/local/bin/python"
            }
        }
    },

    // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
    // "remoteUser": "root"
}
--------------------------------------------------------------------------------
/doc/index.rst:
--------------------------------------------------------------------------------
=======================
Django Redshift Backend
=======================

This is an `Amazon Redshift`_ database backend for Django_.

.. image:: https://jazzband.co/static/img/badge.svg
   :target: https://jazzband.co/
   :alt: Jazzband

.. image:: https://img.shields.io/readthedocs/django-redshift-backend/master.svg
   :alt: Read the Docs (master)
   :target: https://django-redshift-backend.rtfd.io/

.. image:: https://img.shields.io/pypi/v/django-redshift-backend.svg
   :alt: PyPI
   :target: https://pypi.org/project/django-redshift-backend/

.. image:: https://img.shields.io/pypi/pyversions/django-redshift-backend.svg
   :alt: PyPI - Python Version

.. image:: https://img.shields.io/pypi/djversions/django-redshift-backend.svg
   :alt: PyPI - Django Version

.. image:: https://img.shields.io/github/license/jazzband/django-redshift-backend.svg
   :alt: License
   :target: https://github.com/jazzband/django-redshift-backend/blob/master/LICENSE

.. image:: https://img.shields.io/github/workflow/status/jazzband/django-redshift-backend/Test/master
   :alt: Tests
   :target: https://github.com/jazzband/django-redshift-backend/actions?query=workflow%3ATest

.. image:: https://img.shields.io/github/stars/jazzband/django-redshift-backend.svg?style=social&label=Stars
   :alt: GitHub stars
   :target: https://github.com/jazzband/django-redshift-backend


.. _Amazon Redshift: https://aws.amazon.com/jp/redshift/
.. _Django: https://www.djangoproject.com/

Support versions
================

This product is tested with:

* Python-3.9, 3.10, 3.11, 3.12, 3.13
* Django-4.2, 5.0, 5.1

License
=======
Apache Software License


Contents
========

.. toctree::

   basic
   refs
   dev
   design
   changes
   authors
   CODE_OF_CONDUCT

--------------------------------------------------------------------------------
/doc/dev.rst:
--------------------------------------------------------------------------------
===========
Development
===========

Contribution Guideline
======================

.. include:: ../CONTRIBUTING.rst

Issue Reporting
===============

**To Be Written**

* https://github.com/jazzband/django-redshift-backend/issues

Setup development environment
=============================

* Requires a supported Python version
* set it up under the django-redshift-backend.git repository root as::

  $ pip install uv
  $ uv sync

Testing
=======

Run test
--------

Just run tox::

   $ tox

tox has several sections for testing.
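For example, to run a single environment from the envlist in ``tox.ini``::

   $ tox -e py310-dj42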

To test the database migration as well, start postgres and test it as follows::

   $ cd tests
   $ docker-compose up -d
   $ TEST_WITH_POSTGRES=1 tox

To test migrations with Redshift, do it as follows:

1. Create your Redshift cluster on AWS
2. Get the Redshift endpoint URI
3. run tox as: `TEST_WITH_REDSHIFT=redshift://user:password@...redshift.amazonaws.com:5439/?DISABLE_SERVER_SIDE_CURSORS=True tox`

CI (Continuous Integration)
----------------------------

All tests will be run on GitHub Actions:

* https://github.com/jazzband/django-redshift-backend/actions?query=workflow%3ATest


Pull Request
============

**To Be Written**

* https://github.com/jazzband/django-redshift-backend/pulls


Build package
=============

Use build::

   $ uv build


Releasing
=========

New package version
-------------------

The django-redshift-backend package will be uploaded to PyPI: https://pypi.org/project/django-redshift-backend/.

Here is the release procedure.

.. include:: ../checklist.rst


Updated documentation
---------------------

Sphinx documentation under the ``doc/`` directory on the master branch will be automatically uploaded to Read the Docs: https://django-redshift-backend.rtfd.io/.

--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
import os

os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'


from django.apps import apps  # noqa: E402
apps.populate(['testapp'])


import contextlib
from unittest import mock

import pytest

from django_redshift_backend.base import BasePGDatabaseWrapper

TEST_WITH_POSTGRES = os.environ.get('TEST_WITH_POSTGRES')
TEST_WITH_REDSHIFT = os.environ.get('TEST_WITH_REDSHIFT')

skipif_no_database = pytest.mark.skipif(
    not TEST_WITH_POSTGRES and not TEST_WITH_REDSHIFT,
    reason="no TEST_WITH_POSTGRES/TEST_WITH_REDSHIFT are found",
)
run_only_postgres = pytest.mark.skipif(
    not TEST_WITH_POSTGRES,
    reason="Test only for postgres",
)
run_only_redshift = pytest.mark.skipif(
    not TEST_WITH_REDSHIFT,
    reason="Test only for redshift",
)


@contextlib.contextmanager
def postgres_fixture():
    """A context manager that patches the database backend to use PostgreSQL
    for local testing.

    The purpose of the postgres_fixture context manager is to conditionally
    patch the database backend to use PostgreSQL for testing, but only if the
    TEST_WITH_POSTGRES variable is set to True.

    The reason for not using pytest.fixture in the current setup is due to the
    use of classes that inherit from TestCase. pytest fixtures do not directly
    integrate with Django's TestCase based tests.
    """
    if TEST_WITH_POSTGRES:
        with \
                mock.patch(
                    'django_redshift_backend.base.DatabaseWrapper.data_types',
                    BasePGDatabaseWrapper.data_types,
                ), \
                mock.patch(
                    'django_redshift_backend.base.DatabaseSchemaEditor._modify_params_for_redshift',
                    lambda self, params: params
                ), \
                mock.patch(
                    'django_redshift_backend.base.DatabaseSchemaEditor._get_create_options',
                    lambda self, model: '',
                ):
            yield

    else:
        yield
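Example (added commentary, not a repository file): a sketch of how the helpers above are meant to be used from the TestCase-based tests; the test class and its body are hypothetical:

    from django.test import TestCase

    from conftest import postgres_fixture, skipif_no_database


    @skipif_no_database
    class MigrationTests(TestCase):  # hypothetical test case
        def test_create_model(self):
            # patches the Redshift-only SQL away when TEST_WITH_POSTGRES is set
            with postgres_fixture():
                ...  # run schema assertions here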
--------------------------------------------------------------------------------
/django_redshift_backend/_vendor/django40/db/backends/postgresql/client.py:
--------------------------------------------------------------------------------
import signal

from django_redshift_backend._vendor.django40.db.backends.base.client import BaseDatabaseClient


class DatabaseClient(BaseDatabaseClient):
    executable_name = "psql"

    @classmethod
    def settings_to_cmd_args_env(cls, settings_dict, parameters):
        args = [cls.executable_name]
        options = settings_dict.get("OPTIONS", {})

        host = settings_dict.get("HOST")
        port = settings_dict.get("PORT")
        dbname = settings_dict.get("NAME")
        user = settings_dict.get("USER")
        passwd = settings_dict.get("PASSWORD")
        passfile = options.get("passfile")
        service = options.get("service")
        sslmode = options.get("sslmode")
        sslrootcert = options.get("sslrootcert")
        sslcert = options.get("sslcert")
        sslkey = options.get("sslkey")

        if not dbname and not service:
            # Connect to the default 'postgres' db.
            dbname = "postgres"
        if user:
            args += ["-U", user]
        if host:
            args += ["-h", host]
        if port:
            args += ["-p", str(port)]
        if dbname:
            args += [dbname]
        args.extend(parameters)

        env = {}
        if passwd:
            env["PGPASSWORD"] = str(passwd)
        if service:
            env["PGSERVICE"] = str(service)
        if sslmode:
            env["PGSSLMODE"] = str(sslmode)
        if sslrootcert:
            env["PGSSLROOTCERT"] = str(sslrootcert)
        if sslcert:
            env["PGSSLCERT"] = str(sslcert)
        if sslkey:
            env["PGSSLKEY"] = str(sslkey)
        if passfile:
            env["PGPASSFILE"] = str(passfile)
        return args, (env or None)

    def runshell(self, parameters):
        sigint_handler = signal.getsignal(signal.SIGINT)
        try:
            # Allow SIGINT to pass to psql to abort queries.
            signal.signal(signal.SIGINT, signal.SIG_IGN)
            super().runshell(parameters)
        finally:
            # Restore the original SIGINT handler.
            signal.signal(signal.SIGINT, sigint_handler)
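Example (added commentary, not a repository file): what settings_to_cmd_args_env() builds for a typical settings dict -- a worked example traced from the code above:

    from django_redshift_backend._vendor.django40.db.backends.postgresql.client import DatabaseClient

    args, env = DatabaseClient.settings_to_cmd_args_env(
        {
            "NAME": "testing",
            "USER": "user",
            "PASSWORD": "password",
            "HOST": "localhost",
            "PORT": "5439",
            "OPTIONS": {},
        },
        [],
    )
    assert args == ["psql", "-U", "user", "-h", "localhost", "-p", "5439", "testing"]
    assert env == {"PGPASSWORD": "password"}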
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
====================================
Redshift database backend for Django
====================================

This is an `Amazon Redshift`_ database backend for Django_.

.. image:: https://jazzband.co/static/img/badge.svg
   :target: https://jazzband.co/
   :alt: Jazzband

.. image:: https://img.shields.io/readthedocs/django-redshift-backend/master.svg
   :alt: Read the Docs (master)
   :target: https://django-redshift-backend.rtfd.io/

.. image:: https://img.shields.io/pypi/v/django-redshift-backend.svg
   :alt: PyPI
   :target: https://pypi.org/project/django-redshift-backend/

.. image:: https://img.shields.io/pypi/pyversions/django-redshift-backend.svg
   :alt: PyPI - Python Version
   :target: https://pypi.org/project/django-redshift-backend/

.. image:: https://img.shields.io/pypi/djversions/django-redshift-backend.svg
   :alt: PyPI - Django Version
   :target: https://pypi.org/project/django-redshift-backend/

.. image:: https://img.shields.io/github/license/jazzband/django-redshift-backend.svg
   :alt: License
   :target: https://github.com/jazzband/django-redshift-backend/blob/master/LICENSE

.. image:: https://github.com/jazzband/django-redshift-backend/workflows/Test/badge.svg
   :target: https://github.com/jazzband/django-redshift-backend/actions
   :alt: GitHub Actions

.. image:: https://img.shields.io/github/stars/jazzband/django-redshift-backend.svg?style=social&label=Stars
   :alt: GitHub stars
   :target: https://github.com/jazzband/django-redshift-backend

.. _Amazon Redshift: https://aws.amazon.com/jp/redshift/
.. _Django: https://www.djangoproject.com/

Documentation
=============

- https://django-redshift-backend.rtfd.io/

Django settings
===============

ENGINE for DATABASES is 'django_redshift_backend'. You can set the name in your settings.py as::

    DATABASES = {
        'default': {
            'ENGINE': 'django_redshift_backend',
            'NAME': '',
            'USER': '',
            'PASSWORD': '',
            'HOST': '',
            'PORT': '5439',
        }
    }

For more information, please refer to the Documentation_.


LICENSE
=======
Apache Software License

--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
[project]
name = "django-redshift-backend"
dynamic = ["version"]
description = "Redshift database backend for Django"
readme = "README.rst"
license = "Apache-2.0"
license-files = ["LICENSE"]
requires-python = ">=3.9, <4"
authors = [
    { name = "shimizukawa", email = "shimizukawa@gmail.com" },
]
keywords = [
    "django",
    "redshift",
]
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Environment :: Plugins",
    "Framework :: Django",
    "Framework :: Django :: 4.2",
    "Framework :: Django :: 5.0",
    "Framework :: Django :: 5.1",
    "Intended Audience :: Developers",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Topic :: Software Development :: Libraries :: Python Modules",
]
dependencies = [
    "django<5.2",
    "backports.zoneinfo;python_version<'3.9'",
]

[project.optional-dependencies]
psycopg2 = [
    "psycopg2",
]
psycopg2-binary = [
    "psycopg2-binary",
]

[dependency-groups]
dev = [
    "setuptools>=73.0.1",
    "setuptools-scm>=8.1.0",
    "wheel>=0.44.0",
    "tox-uv>=1.11.2",
    "tox-gh-actions>=3.2.0",
    "ruff>=0.6.2",
    "twine>=5.1.1",
]

[project.urls]
Documentation = "https://django-redshift-backend.readthedocs.io/"
Homepage = "https://github.com/jazzband/django-redshift-backend"
"Release notes" = "https://django-redshift-backend.readthedocs.io/en/master/changes.html"
"https://django-redshift-backend.readthedocs.io/en/master/changes.html" 61 | Source = "https://github.com/jazzband/django-redshift-backend" 62 | Tracker = "https://github.com/jazzband/django-redshift-backend/issues" 63 | 64 | [build-system] 65 | requires = ["setuptools>=64", "setuptools_scm>=8"] 66 | build-backend = "setuptools.build_meta" 67 | 68 | [tool.setuptools] 69 | include-package-data = true 70 | 71 | [tool.setuptools_scm] 72 | # this empty section means: use_scm_version=True 73 | version_scheme = "guess-next-dev" 74 | local_scheme = "no-local-version" 75 | 76 | [tool.ruff] 77 | exclude = ["django_redshift_backend/_vendor"] 78 | 79 | [tool.uv.sources] 80 | django-redshift-backend = { workspace = true } 81 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | As contributors and maintainers of the Jazzband projects, and in the interest of 4 | fostering an open and welcoming community, we pledge to respect all people who 5 | contribute through reporting issues, posting feature requests, updating documentation, 6 | submitting pull requests or patches, and other activities. 7 | 8 | We are committed to making participation in the Jazzband a harassment-free experience 9 | for everyone, regardless of the level of experience, gender, gender identity and 10 | expression, sexual orientation, disability, personal appearance, body size, race, 11 | ethnicity, age, religion, or nationality. 12 | 13 | Examples of unacceptable behavior by participants include: 14 | 15 | - The use of sexualized language or imagery 16 | - Personal attacks 17 | - Trolling or insulting/derogatory comments 18 | - Public or private harassment 19 | - Publishing other's private information, such as physical or electronic addresses, 20 | without explicit permission 21 | - Other unethical or unprofessional conduct 22 | 23 | The Jazzband roadies have the right and responsibility to remove, edit, or reject 24 | comments, commits, code, wiki edits, issues, and other contributions that are not 25 | aligned to this Code of Conduct, or to ban temporarily or permanently any contributor 26 | for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 27 | 28 | By adopting this Code of Conduct, the roadies commit themselves to fairly and 29 | consistently applying these principles to every aspect of managing the jazzband 30 | projects. Roadies who do not follow or enforce the Code of Conduct may be permanently 31 | removed from the Jazzband roadies. 32 | 33 | This code of conduct applies both within project spaces and in public spaces when an 34 | individual is representing the project or its community. 35 | 36 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by 37 | contacting the roadies at `roadies@jazzband.co`. All complaints will be reviewed and 38 | investigated and will result in a response that is deemed necessary and appropriate to 39 | the circumstances. Roadies are obligated to maintain confidentiality with regard to the 40 | reporter of an incident. 
41 | 42 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 43 | 1.3.0, available at [https://contributor-covenant.org/version/1/3/0/][version] 44 | 45 | [homepage]: https://contributor-covenant.org 46 | [version]: https://contributor-covenant.org/version/1/3/0/ 47 | -------------------------------------------------------------------------------- /doc/CODE_OF_CONDUCT.rst: -------------------------------------------------------------------------------- 1 | .. THIS IS A reStructuredText version of ../CODE_OF_CONDUCT.md 2 | 3 | Code of Conduct 4 | =============== 5 | 6 | As contributors and maintainers of the Jazzband projects, and in the interest of 7 | fostering an open and welcoming community, we pledge to respect all people who 8 | contribute through reporting issues, posting feature requests, updating documentation, 9 | submitting pull requests or patches, and other activities. 10 | 11 | We are committed to making participation in the Jazzband a harassment-free experience 12 | for everyone, regardless of the level of experience, gender, gender identity and 13 | expression, sexual orientation, disability, personal appearance, body size, race, 14 | ethnicity, age, religion, or nationality. 15 | 16 | Examples of unacceptable behavior by participants include: 17 | 18 | - The use of sexualized language or imagery 19 | - Personal attacks 20 | - Trolling or insulting/derogatory comments 21 | - Public or private harassment 22 | - Publishing others' private information, such as physical or electronic addresses, 23 | without explicit permission 24 | - Other unethical or unprofessional conduct 25 | 26 | The Jazzband roadies have the right and responsibility to remove, edit, or reject 27 | comments, commits, code, wiki edits, issues, and other contributions that are not 28 | aligned to this Code of Conduct, or to ban temporarily or permanently any contributor 29 | for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 30 | 31 | By adopting this Code of Conduct, the roadies commit themselves to fairly and 32 | consistently applying these principles to every aspect of managing the Jazzband 33 | projects. Roadies who do not follow or enforce the Code of Conduct may be permanently 34 | removed from the Jazzband roadies. 35 | 36 | This code of conduct applies both within project spaces and in public spaces when an 37 | individual is representing the project or its community. 38 | 39 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by 40 | contacting the roadies at `roadies@jazzband.co`. All complaints will be reviewed and 41 | investigated and will result in a response that is deemed necessary and appropriate to 42 | the circumstances. Roadies are obligated to maintain confidentiality with regard to the 43 | reporter of an incident. 44 | 45 | This Code of Conduct is adapted from the `Contributor Covenant`_, version 46 | 1.3.0, available at https://contributor-covenant.org/version/1/3/0/ 47 | 48 | ..
_Contributor Covenant: https://contributor-covenant.org 49 | -------------------------------------------------------------------------------- /tests/testapp/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 3.2.12 on 2022-02-13 21:03 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | import django_redshift_backend 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | initial = True 11 | 12 | dependencies = [ 13 | ] 14 | 15 | operations = [ 16 | migrations.CreateModel( 17 | name='TestModel', 18 | fields=[ 19 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 20 | ('ctime', models.DateTimeField()), 21 | ('text', models.TextField()), 22 | ('uuid', models.UUIDField()), 23 | ], 24 | ), 25 | migrations.CreateModel( 26 | name='TestParentModel', 27 | fields=[ 28 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 29 | ('age', models.IntegerField()), 30 | ], 31 | ), 32 | migrations.CreateModel( 33 | name='TestReferencedModel', 34 | fields=[ 35 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 36 | ], 37 | ), 38 | migrations.CreateModel( 39 | name='TestModelWithMetaKeys', 40 | fields=[ 41 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 42 | ('name', models.CharField(max_length=100)), 43 | ('age', models.IntegerField()), 44 | ('created_at', models.DateTimeField()), 45 | ('fk', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='testapp.testreferencedmodel')), 46 | ], 47 | options={ 48 | 'ordering': [django_redshift_backend.SortKey('created_at'), django_redshift_backend.SortKey('-id')], 49 | }, 50 | ), 51 | migrations.CreateModel( 52 | name='TestChildModel', 53 | fields=[ 54 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 55 | ('age', models.IntegerField()), 56 | ('parent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='testapp.testparentmodel')), 57 | ], 58 | ), 59 | migrations.AddIndex( 60 | model_name='testmodelwithmetakeys', 61 | index=django_redshift_backend.DistKey(fields=['fk'], name='testapp_tes_fk_id_cd99f5_idx'), 62 | ), 63 | ] 64 | -------------------------------------------------------------------------------- /django_redshift_backend/_vendor/django40/db/backends/postgresql/features.py: -------------------------------------------------------------------------------- 1 | import operator 2 | 3 | from django.db import InterfaceError 4 | from django_redshift_backend._vendor.django40.db.backends.base.features import BaseDatabaseFeatures 5 | from django.utils.functional import cached_property 6 | 7 | 8 | class DatabaseFeatures(BaseDatabaseFeatures): 9 | allows_group_by_selected_pks = True 10 | can_return_columns_from_insert = True 11 | can_return_rows_from_bulk_insert = True 12 | has_real_datatype = True 13 | has_native_uuid_field = True 14 | has_native_duration_field = True 15 | has_native_json_field = True 16 | can_defer_constraint_checks = True 17 | has_select_for_update = True 18 | has_select_for_update_nowait = True 19 | has_select_for_update_of = True 20 | has_select_for_update_skip_locked = True 21 | has_select_for_no_key_update = True 22 | can_release_savepoints = True 23 | supports_tablespaces = True 24 | supports_transactions = True 25 | can_introspect_materialized_views = 
True 26 | can_distinct_on_fields = True 27 | can_rollback_ddl = True 28 | supports_combined_alters = True 29 | nulls_order_largest = True 30 | closed_cursor_error_class = InterfaceError 31 | has_case_insensitive_like = False 32 | greatest_least_ignores_nulls = True 33 | can_clone_databases = True 34 | supports_temporal_subtraction = True 35 | supports_slicing_ordering_in_compound = True 36 | create_test_procedure_without_params_sql = """ 37 | CREATE FUNCTION test_procedure () RETURNS void AS $$ 38 | DECLARE 39 | V_I INTEGER; 40 | BEGIN 41 | V_I := 1; 42 | END; 43 | $$ LANGUAGE plpgsql;""" 44 | create_test_procedure_with_int_param_sql = """ 45 | CREATE FUNCTION test_procedure (P_I INTEGER) RETURNS void AS $$ 46 | DECLARE 47 | V_I INTEGER; 48 | BEGIN 49 | V_I := P_I; 50 | END; 51 | $$ LANGUAGE plpgsql;""" 52 | requires_casted_case_in_updates = True 53 | supports_over_clause = True 54 | only_supports_unbounded_with_preceding_and_following = True 55 | supports_aggregate_filter_clause = True 56 | supported_explain_formats = {"JSON", "TEXT", "XML", "YAML"} 57 | supports_deferrable_unique_constraints = True 58 | has_json_operators = True 59 | json_key_contains_list_matching_requires_list = True 60 | test_collations = { 61 | "non_default": "sv-x-icu", 62 | "swedish_ci": "sv-x-icu", 63 | } 64 | test_now_utc_template = "STATEMENT_TIMESTAMP() AT TIME ZONE 'UTC'" 65 | 66 | django_test_skips = { 67 | "opclasses are PostgreSQL only.": { 68 | "indexes.tests.SchemaIndexesNotPostgreSQLTests." 69 | "test_create_index_ignores_opclasses", 70 | }, 71 | } 72 | 73 | @cached_property 74 | def introspected_field_types(self): 75 | return { 76 | **super().introspected_field_types, 77 | "PositiveBigIntegerField": "BigIntegerField", 78 | "PositiveIntegerField": "IntegerField", 79 | "PositiveSmallIntegerField": "SmallIntegerField", 80 | } 81 | 82 | @cached_property 83 | def is_postgresql_11(self): 84 | return self.connection.pg_version >= 110000 85 | 86 | @cached_property 87 | def is_postgresql_12(self): 88 | return self.connection.pg_version >= 120000 89 | 90 | @cached_property 91 | def is_postgresql_13(self): 92 | return self.connection.pg_version >= 130000 93 | 94 | has_websearch_to_tsquery = property(operator.attrgetter("is_postgresql_11")) 95 | supports_covering_indexes = property(operator.attrgetter("is_postgresql_11")) 96 | supports_covering_gist_indexes = property(operator.attrgetter("is_postgresql_12")) 97 | supports_non_deterministic_collations = property( 98 | operator.attrgetter("is_postgresql_12") 99 | ) 100 | -------------------------------------------------------------------------------- /django_redshift_backend/_vendor/django40/db/backends/postgresql/creation.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | from psycopg2 import errorcodes 4 | 5 | from django.core.exceptions import ImproperlyConfigured 6 | from django_redshift_backend._vendor.django40.db.backends.base.creation import BaseDatabaseCreation 7 | from django_redshift_backend._vendor.django40.db.backends.utils import strip_quotes 8 | 9 | 10 | class DatabaseCreation(BaseDatabaseCreation): 11 | def _quote_name(self, name): 12 | return self.connection.ops.quote_name(name) 13 | 14 | def _get_database_create_suffix(self, encoding=None, template=None): 15 | suffix = "" 16 | if encoding: 17 | suffix += " ENCODING '{}'".format(encoding) 18 | if template: 19 | suffix += " TEMPLATE {}".format(self._quote_name(template)) 20 | return suffix and "WITH" + suffix 21 | 22 | def 
sql_table_creation_suffix(self): 23 | test_settings = self.connection.settings_dict["TEST"] 24 | if test_settings.get("COLLATION") is not None: 25 | raise ImproperlyConfigured( 26 | "PostgreSQL does not support collation setting at database " 27 | "creation time." 28 | ) 29 | return self._get_database_create_suffix( 30 | encoding=test_settings["CHARSET"], 31 | template=test_settings.get("TEMPLATE"), 32 | ) 33 | 34 | def _database_exists(self, cursor, database_name): 35 | cursor.execute( 36 | "SELECT 1 FROM pg_catalog.pg_database WHERE datname = %s", 37 | [strip_quotes(database_name)], 38 | ) 39 | return cursor.fetchone() is not None 40 | 41 | def _execute_create_test_db(self, cursor, parameters, keepdb=False): 42 | try: 43 | if keepdb and self._database_exists(cursor, parameters["dbname"]): 44 | # If the database should be kept and it already exists, don't 45 | # try to create a new one. 46 | return 47 | super()._execute_create_test_db(cursor, parameters, keepdb) 48 | except Exception as e: 49 | if getattr(e.__cause__, "pgcode", "") != errorcodes.DUPLICATE_DATABASE: 50 | # All errors except "database already exists" cancel tests. 51 | self.log("Got an error creating the test database: %s" % e) 52 | sys.exit(2) 53 | elif not keepdb: 54 | # If the database should be kept, ignore "database already 55 | # exists". 56 | raise 57 | 58 | def _clone_test_db(self, suffix, verbosity, keepdb=False): 59 | # CREATE DATABASE ... WITH TEMPLATE ... requires closing connections 60 | # to the template database. 61 | self.connection.close() 62 | 63 | source_database_name = self.connection.settings_dict["NAME"] 64 | target_database_name = self.get_test_db_clone_settings(suffix)["NAME"] 65 | test_db_params = { 66 | "dbname": self._quote_name(target_database_name), 67 | "suffix": self._get_database_create_suffix(template=source_database_name), 68 | } 69 | with self._nodb_cursor() as cursor: 70 | try: 71 | self._execute_create_test_db(cursor, test_db_params, keepdb) 72 | except Exception: 73 | try: 74 | if verbosity >= 1: 75 | self.log( 76 | "Destroying old test database for alias %s..." 77 | % ( 78 | self._get_database_display_str( 79 | verbosity, target_database_name 80 | ), 81 | ) 82 | ) 83 | cursor.execute("DROP DATABASE %(dbname)s" % test_db_params) 84 | self._execute_create_test_db(cursor, test_db_params, keepdb) 85 | except Exception as e: 86 | self.log("Got an error cloning the test database: %s" % e) 87 | sys.exit(2) 88 | -------------------------------------------------------------------------------- /examples/proj1/config/settings.py: -------------------------------------------------------------------------------- 1 | """ 2 | Django settings for proj1 project. 3 | 4 | Generated by 'django-admin startproject' using Django 3.2.12. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/3.2/topics/settings/ 8 | 9 | For the full list of settings and their values, see 10 | https://docs.djangoproject.com/en/3.2/ref/settings/ 11 | """ 12 | from pathlib import Path 13 | import environ 14 | 15 | # Build paths inside the project like this: BASE_DIR / 'subdir'. 
16 | BASE_DIR = Path(__file__).resolve().parent.parent 17 | 18 | env = environ.Env( 19 | # set casting, default value 20 | DEBUG=(bool, False) 21 | ) 22 | if env('ENV_FILE', default=None): 23 | # Take environment variables from specified file 24 | env.read_env(env('ENV_FILE')) 25 | elif (BASE_DIR/'.env').is_file(): 26 | # Take environment variables from .env file 27 | environ.Env.read_env(BASE_DIR/'.env') 28 | else: 29 | pass # use environment variable without .env file 30 | 31 | # Quick-start development settings - unsuitable for production 32 | # See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/ 33 | 34 | # SECURITY WARNING: keep the secret key used in production secret! 35 | SECRET_KEY = env('SECRET_KEY') 36 | 37 | # SECURITY WARNING: don't run with debug turned on in production! 38 | DEBUG = env('DEBUG') 39 | 40 | ALLOWED_HOSTS = [] 41 | 42 | 43 | # Application definition 44 | 45 | INSTALLED_APPS = [ 46 | 'django.contrib.admin', 47 | 'django.contrib.auth', 48 | 'django.contrib.contenttypes', 49 | 'django.contrib.sessions', 50 | 'django.contrib.messages', 51 | 'django.contrib.staticfiles', 52 | 53 | 'testapp', 54 | ] 55 | 56 | MIDDLEWARE = [ 57 | 'django.middleware.security.SecurityMiddleware', 58 | 'django.contrib.sessions.middleware.SessionMiddleware', 59 | 'django.middleware.common.CommonMiddleware', 60 | 'django.middleware.csrf.CsrfViewMiddleware', 61 | 'django.contrib.auth.middleware.AuthenticationMiddleware', 62 | 'django.contrib.messages.middleware.MessageMiddleware', 63 | 'django.middleware.clickjacking.XFrameOptionsMiddleware', 64 | ] 65 | 66 | ROOT_URLCONF = 'config.urls' 67 | 68 | TEMPLATES = [ 69 | { 70 | 'BACKEND': 'django.template.backends.django.DjangoTemplates', 71 | 'DIRS': [], 72 | 'APP_DIRS': True, 73 | 'OPTIONS': { 74 | 'context_processors': [ 75 | 'django.template.context_processors.debug', 76 | 'django.template.context_processors.request', 77 | 'django.contrib.auth.context_processors.auth', 78 | 'django.contrib.messages.context_processors.messages', 79 | ], 80 | }, 81 | }, 82 | ] 83 | 84 | WSGI_APPLICATION = 'config.wsgi.application' 85 | 86 | 87 | # Database 88 | # https://docs.djangoproject.com/en/3.2/ref/settings/#databases 89 | 90 | DATABASES = { 91 | 'default': env.db(), 92 | } 93 | 94 | 95 | # Password validation 96 | # https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators 97 | 98 | AUTH_PASSWORD_VALIDATORS = [ 99 | { 100 | 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', 101 | }, 102 | { 103 | 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 104 | }, 105 | { 106 | 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', 107 | }, 108 | { 109 | 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', 110 | }, 111 | ] 112 | 113 | 114 | # Internationalization 115 | # https://docs.djangoproject.com/en/3.2/topics/i18n/ 116 | 117 | LANGUAGE_CODE = 'en-us' 118 | 119 | TIME_ZONE = 'UTC' 120 | 121 | USE_I18N = True 122 | 123 | USE_L10N = True 124 | 125 | USE_TZ = True 126 | 127 | 128 | # Static files (CSS, JavaScript, Images) 129 | # https://docs.djangoproject.com/en/3.2/howto/static-files/ 130 | 131 | STATIC_URL = '/static/' 132 | 133 | # Default primary key field type 134 | # https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field 135 | 136 | DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' 137 | -------------------------------------------------------------------------------- /doc/design.rst: 
-------------------------------------------------------------------------------- 1 | =================== 2 | Design Overview 3 | =================== 4 | 5 | Purpose 6 | =========== 7 | 8 | `django-redshift-backend` provides a backend for integrating the Amazon Redshift database with the Django framework. It allows Django applications to use Redshift as their database while maintaining compatibility with Django's ORM and database abstraction layer. 9 | 10 | Background of the Changes to support Django 4.2 11 | ===================================================== 12 | 13 | Amazon Redshift was forked from an older version of PostgreSQL. As a result, Django's PostgreSQL database backend cannot be used directly, especially with newer versions of Django (4.2, 5.0), where compatibility issues arise. 14 | The current changes therefore aim to support Django 4.2 by bundling Django 4.0 code. 15 | 16 | Main Changes 17 | ----------------- 18 | 19 | 1. **Inclusion of Django 4.0 Code**: 20 | 21 | - To ensure that the Redshift backend works with Django 4.2 and future versions (such as Django 5.0), we have included database-related code from Django 4.0 in the package. 22 | - This avoids the difficulties in implementing the Redshift backend with Django 4.2's codebase. 23 | Difficulties: https://github.com/jazzband/django-redshift-backend/pull/111 24 | 25 | 2. **Ensuring Code Compatibility**: 26 | 27 | - We have made the necessary modifications and adjustments to ensure operation with Django 4.2 and later versions. 28 | - Specific changes can be viewed at the following link: https://github.com/jazzband/django-redshift-backend/pull/129 29 | 30 | Key Components of django-redshift-backend 31 | ============================================= 32 | 33 | 1. **Custom Database Backend** 34 | 35 | - Extends Django's PostgreSQL backend 36 | - Implements Redshift-specific functionality 37 | - Handles differences between PostgreSQL and Redshift 38 | 39 | 2. **SQL Compiler** 40 | 41 | - Modifies SQL generation to be compatible with Redshift 42 | - Handles Redshift-specific SQL syntax and limitations 43 | 44 | 3. **Schema Editor** 45 | 46 | - Customizes schema migrations for Redshift 47 | - Manages Redshift-specific data types and constraints 48 | 49 | Design Principles 50 | ==================== 51 | 52 | 1. **Compatibility**: Maintain maximum compatibility with Django's existing PostgreSQL backend 53 | 2. **Transparency**: Allow developers to use Django's ORM without significant changes to their code 54 | 3. **Flexibility**: Support Redshift-specific features where possible 55 | 56 | Key Challenges 57 | ==================== 58 | 59 | 1. **Version Compatibility**: 60 | Maintain compatibility with Redshift by using the database backend from Django 4.0, which is based on PostgreSQL 10 (no longer supported by Django). This ensures stable operation even with the latest Django versions. 61 | 62 | 2. **SQL Differences**: 63 | Handle syntactical and functional differences between PostgreSQL and Redshift. In particular, some PostgreSQL DDL (Data Definition Language) statements are not compatible with Redshift, requiring adjustments in areas such as table creation and constraint handling. 64 | 65 | 3. **Data Type Mapping**: 66 | Map Django field types to appropriate Redshift data types. This is crucial as Redshift has different data types and limitations compared to standard PostgreSQL. 67 | 68 | Implementation Strategy 69 | ============================ 70 | 71 | 1. Use Django 4.0's PostgreSQL backend as the base for the custom Redshift backend 72 | 2. Override necessary methods to implement Redshift-specific behavior 73 | 3. Implement custom SQL compilation logic to generate Redshift-compatible SQL 74 | 4. Develop schema editing logic that accounts for Redshift's limitations
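As a rough illustration of steps 1 and 2 above, the extension pattern looks like the following minimal sketch. The vendored module paths are real, but the subclass layout and the chosen flags are illustrative assumptions for this document, not the actual django_redshift_backend source::

    # Sketch: derive a Redshift backend from the vendored Django 4.0
    # PostgreSQL backend (hypothetical subclasses).
    from django_redshift_backend._vendor.django40.db.backends.postgresql import (
        base as postgresql_base,
        features as postgresql_features,
    )


    class DatabaseFeatures(postgresql_features.DatabaseFeatures):
        # Disable PostgreSQL capabilities that Redshift lacks so the ORM
        # never emits the corresponding SQL.
        has_select_for_update = False
        can_distinct_on_fields = False
        supports_deferrable_unique_constraints = False


    class DatabaseWrapper(postgresql_base.DatabaseWrapper):
        vendor = 'redshift'
        # Django's BaseDatabaseWrapper instantiates this class attribute,
        # so overriding it swaps in the Redshift feature flags.
        features_class = DatabaseFeatures

Everything not explicitly overridden falls through to the vendored PostgreSQL implementation, which keeps the Redshift-specific code small.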
75 | 76 | Testing and Validation 77 | ======================== 78 | 79 | 1. Unit tests for Redshift-specific functionality and limitations 80 | 2. Integration tests with actual Redshift instances 81 | 3. Compatibility testing with supported Django and Python versions 82 | 4. Operational verification in common Django application scenarios 83 | 84 | Future Considerations 85 | ============================ 86 | 87 | 1. Ongoing maintenance of the Django 4.0-based code to ensure continued compatibility with newer Django versions. 88 | 89 | 2. Exploration of a potential re-implementation from scratch based on Django 4.2 or later. This would involve: 90 | 91 | - Analyzing the feasibility of adapting to Django 4.2's database backend structure 92 | - Evaluating the benefits and drawbacks of a complete rewrite 93 | -------------------------------------------------------------------------------- /examples/dj-sql-explorer/config/settings.py: -------------------------------------------------------------------------------- 1 | """ 2 | Django settings for dj-sql-explorer project. 3 | 4 | Generated by 'django-admin startproject' using Django 3.2.12. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/3.2/topics/settings/ 8 | 9 | For the full list of settings and their values, see 10 | https://docs.djangoproject.com/en/3.2/ref/settings/ 11 | """ 12 | from pathlib import Path 13 | import environ 14 | 15 | # Build paths inside the project like this: BASE_DIR / 'subdir'. 16 | BASE_DIR = Path(__file__).resolve().parent.parent 17 | 18 | env = environ.Env( 19 | # set casting, default value 20 | DEBUG=(bool, False) 21 | ) 22 | if env('ENV_FILE', default=None): 23 | # Take environment variables from specified file 24 | env.read_env(env('ENV_FILE')) 25 | elif (BASE_DIR/'.env').is_file(): 26 | # Take environment variables from .env file 27 | environ.Env.read_env(BASE_DIR/'.env') 28 | else: 29 | pass # use environment variable without .env file 30 | 31 | # Quick-start development settings - unsuitable for production 32 | # See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/ 33 | 34 | # SECURITY WARNING: keep the secret key used in production secret! 35 | SECRET_KEY = env('SECRET_KEY') 36 | 37 | # SECURITY WARNING: don't run with debug turned on in production!
38 | DEBUG = env('DEBUG') 39 | 40 | ALLOWED_HOSTS = [] 41 | 42 | 43 | # Application definition 44 | 45 | INSTALLED_APPS = [ 46 | 'django.contrib.admin', 47 | 'django.contrib.auth', 48 | 'django.contrib.contenttypes', 49 | 'django.contrib.sessions', 50 | 'django.contrib.messages', 51 | 'django.contrib.staticfiles', 52 | 53 | 'explorer', 54 | 'testapp', 55 | ] 56 | 57 | MIDDLEWARE = [ 58 | 'django.middleware.security.SecurityMiddleware', 59 | 'django.contrib.sessions.middleware.SessionMiddleware', 60 | 'django.middleware.common.CommonMiddleware', 61 | 'django.middleware.csrf.CsrfViewMiddleware', 62 | 'django.contrib.auth.middleware.AuthenticationMiddleware', 63 | 'django.contrib.messages.middleware.MessageMiddleware', 64 | 'django.middleware.clickjacking.XFrameOptionsMiddleware', 65 | ] 66 | 67 | ROOT_URLCONF = 'config.urls' 68 | 69 | TEMPLATES = [ 70 | { 71 | 'BACKEND': 'django.template.backends.django.DjangoTemplates', 72 | 'DIRS': [], 73 | 'APP_DIRS': True, 74 | 'OPTIONS': { 75 | 'context_processors': [ 76 | 'django.template.context_processors.debug', 77 | 'django.template.context_processors.request', 78 | 'django.contrib.auth.context_processors.auth', 79 | 'django.contrib.messages.context_processors.messages', 80 | ], 81 | }, 82 | }, 83 | ] 84 | 85 | WSGI_APPLICATION = 'config.wsgi.application' 86 | 87 | 88 | # Database 89 | # https://docs.djangoproject.com/en/3.2/ref/settings/#databases 90 | 91 | DATABASES = { 92 | 'default': env.db(), 93 | } 94 | 95 | 96 | # Password validation 97 | # https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators 98 | 99 | AUTH_PASSWORD_VALIDATORS = [ 100 | { 101 | 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', 102 | }, 103 | { 104 | 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 105 | }, 106 | { 107 | 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', 108 | }, 109 | { 110 | 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', 111 | }, 112 | ] 113 | 114 | 115 | # Internationalization 116 | # https://docs.djangoproject.com/en/3.2/topics/i18n/ 117 | 118 | LANGUAGE_CODE = 'en-us' 119 | 120 | TIME_ZONE = 'UTC' 121 | 122 | USE_I18N = True 123 | 124 | USE_L10N = True 125 | 126 | USE_TZ = True 127 | 128 | 129 | # Static files (CSS, JavaScript, Images) 130 | # https://docs.djangoproject.com/en/3.2/howto/static-files/ 131 | 132 | STATIC_URL = '/static/' 133 | 134 | # Default primary key field type 135 | # https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field 136 | 137 | DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' 138 | 139 | # django-sql-explorer 140 | EXPLORER_CONNECTIONS = { 'Default': 'default' } 141 | EXPLORER_DEFAULT_CONNECTION = 'default' 142 | 143 | LOGGING = { 144 | 'version': 1, 145 | 'disable_existing_loggers': False, 146 | 'handlers': { 147 | 'console': { 148 | 'level': 'DEBUG', 149 | 'class': 'logging.StreamHandler', 150 | }, 151 | }, 152 | 'loggers': { 153 | 'django.db.backends': { 154 | 'handlers': ['console'], 155 | 'level': 'DEBUG', 156 | }, 157 | }, 158 | } -------------------------------------------------------------------------------- /tests/test_redshift_backend.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import unittest 4 | 5 | from django.db import connections 6 | from django.db.utils import NotSupportedError 7 | from django.core.management.color import no_style 8 | 9 | from conftest import skipif_no_database 10 | 11 | 
12 | def norm_sql(sql): 13 | return ' '.join(sql.split()).replace('( ', '(').replace(' )', ')').replace(' ;', ';') 14 | 15 | 16 | class DatabaseWrapperTest(unittest.TestCase): 17 | 18 | def test_load_redshift_backend(self): 19 | db = connections['default'] 20 | self.assertIsNotNone(db) 21 | 22 | 23 | expected_ddl_normal = norm_sql( 24 | u'''CREATE TABLE "testapp_testmodel" ( 25 | "id" integer identity(1, 1) NOT NULL PRIMARY KEY, 26 | "ctime" timestamp with time zone NOT NULL, 27 | "text" varchar(max) NOT NULL, 28 | "uuid" varchar(36) NOT NULL 29 | ) 30 | ;''') 31 | 32 | expected_ddl_meta_keys = norm_sql( 33 | u'''CREATE TABLE "testapp_testmodelwithmetakeys" ( 34 | "id" integer identity(1, 1) NOT NULL PRIMARY KEY, 35 | "name" varchar(100) NOT NULL, 36 | "age" integer NOT NULL, 37 | "created_at" timestamp with time zone NOT NULL, 38 | "fk_id" integer NOT NULL 39 | ) DISTKEY("fk_id") SORTKEY("created_at", "id") 40 | ;''') 41 | 42 | 43 | expected_dml_annotate = norm_sql( 44 | u'''SELECT 45 | "testapp_testparentmodel"."id", 46 | "testapp_testparentmodel"."age", 47 | COUNT("testapp_testchildmodel"."id") AS "cnt" 48 | FROM "testapp_testparentmodel" 49 | LEFT OUTER JOIN "testapp_testchildmodel" 50 | ON ("testapp_testparentmodel"."id" = "testapp_testchildmodel"."parent_id") 51 | GROUP BY 52 | "testapp_testparentmodel"."id", 53 | "testapp_testparentmodel"."age" 54 | ''') 55 | 56 | expected_aggregate_filter_emulated = norm_sql( 57 | u'''SELECT 58 | "testapp_testparentmodel"."id", 59 | "testapp_testparentmodel"."age", 60 | COUNT( 61 | CASE WHEN "testapp_testparentmodel"."age" < %s 62 | THEN "testapp_testchildmodel"."id" ELSE NULL END 63 | ) AS "cnt" 64 | FROM "testapp_testparentmodel" 65 | LEFT OUTER JOIN "testapp_testchildmodel" 66 | ON ("testapp_testparentmodel"."id" = "testapp_testchildmodel"."parent_id") 67 | GROUP BY 68 | "testapp_testparentmodel"."id", 69 | "testapp_testparentmodel"."age" 70 | ''') 71 | 72 | expected_dml_distinct = norm_sql( 73 | u'''SELECT DISTINCT 74 | "testapp_testmodel"."id", 75 | "testapp_testmodel"."ctime", 76 | "testapp_testmodel"."text", 77 | "testapp_testmodel"."uuid" 78 | FROM "testapp_testmodel" 79 | ''') 80 | 81 | 82 | class ModelTest(unittest.TestCase): 83 | 84 | def check_model_creation(self, model, expected_ddl): 85 | conn = connections['default'] 86 | statements, params = conn.creation.sql_create_model(model, no_style(), set()) 87 | sql = norm_sql(''.join(statements)) 88 | self.assertEqual(sql, expected_ddl) 89 | 90 | def test_annotate(self): 91 | from django.db.models import Count 92 | from testapp.models import TestParentModel 93 | query = TestParentModel.objects.annotate(cnt=Count('testchildmodel')).query 94 | compiler = query.get_compiler(using='default') 95 | sql = norm_sql(compiler.as_sql()[0]) 96 | self.assertEqual(sql, expected_dml_annotate) 97 | 98 | def test_emulate_aggregate_filter(self): 99 | self.maxDiff = None 100 | from django.db.models import Count, Q 101 | from testapp.models import TestParentModel 102 | query = TestParentModel.objects.annotate( 103 | cnt=Count('testchildmodel', filter=Q(age__lt=10)) 104 | ).query 105 | compiler = query.get_compiler(using='default') 106 | sql = norm_sql(compiler.as_sql()[0]) 107 | self.assertEqual(sql, expected_aggregate_filter_emulated) 108 | 109 | def test_insert_uuid_field(self): 110 | import uuid 111 | from django.db.models import sql 112 | from testapp.models import TestModel 113 | obj = TestModel(uuid=uuid.uuid4()) 114 | q = sql.InsertQuery(obj) 115 | q.insert_values(obj._meta.local_fields, [obj]) 116 | 
statements = q.get_compiler('default').as_sql() 117 | # uuid is the last field of TestModel 118 | uuid_insert_value = statements[0][1][-1] 119 | # the Python value for insertion must be a string whose length is 32 120 | self.assertEqual(type(uuid_insert_value), str) 121 | self.assertEqual(len(uuid_insert_value), 32) 122 | 123 | def test_distinct(self): 124 | from testapp.models import TestModel 125 | query = TestModel.objects.distinct().query 126 | compiler = query.get_compiler(using='default') 127 | sql = norm_sql(compiler.as_sql()[0]) 128 | self.assertEqual(sql, expected_dml_distinct) 129 | 130 | def test_distinct_with_fields(self): 131 | from testapp.models import TestModel 132 | query = TestModel.objects.distinct('text').query 133 | compiler = query.get_compiler(using='default') 134 | with self.assertRaises(NotSupportedError): 135 | compiler.as_sql() 136 | 137 | 138 | class MigrationTest(unittest.TestCase): 139 | 140 | def check_model_creation(self, model, expected_ddl): 141 | conn = connections['default'] 142 | schema_editor = conn.schema_editor(collect_sql=True) 143 | schema_editor.deferred_sql = [] 144 | schema_editor.create_model(model) 145 | sql = norm_sql(''.join(schema_editor.collected_sql)) 146 | self.assertEqual(sql, expected_ddl) 147 | 148 | def test_create_model(self): 149 | from testapp.models import TestModel 150 | self.check_model_creation(TestModel, expected_ddl_normal) 151 | 152 | def test_create_table_meta_keys(self): 153 | from testapp.models import TestModelWithMetaKeys 154 | self.check_model_creation(TestModelWithMetaKeys, expected_ddl_meta_keys) 155 | 156 | @skipif_no_database 157 | def test_sqlmigrate(self): 158 | from django.db import connection 159 | from django.db.migrations.loader import MigrationLoader 160 | loader = MigrationLoader(connection) 161 | collect_sql = loader.collect_sql 162 | 163 | app_label, migration_name = 'testapp', '0001' 164 | migration = loader.get_migration_by_prefix(app_label, migration_name) 165 | target = (app_label, migration.name) 166 | 167 | plan = [(loader.graph.nodes[target], False)] 168 | sql_statements = collect_sql(plan) 169 | print('\n'.join(sql_statements)) 170 | assert sql_statements # It doesn't matter what SQL is generated. 171 | -------------------------------------------------------------------------------- /doc/refs.rst: -------------------------------------------------------------------------------- 1 | ========== 2 | References 3 | ========== 4 | 5 | .. contents:: 6 | :local: 7 | 8 | Differences from postgres_psycopg2 backend 9 | ========================================== 10 | 11 | Type mapping: 12 | 13 | * 'integer identity(1, 1)' for AutoField 14 | * 'bigint identity(1, 1)' for BigAutoField 15 | * 'timestamp with time zone' for DateTimeField 16 | * 'varchar(max)' for TextField 17 | * 'varchar(32)' for UUIDField 18 | * The VARCHAR length can be multiplied to support UTF-8 strings, using the 19 | `REDSHIFT_VARCHAR_LENGTH_MULTIPLIER` setting. 20 | 21 | Stop using: 22 | 23 | * RETURNING (single insert and bulk insert) 24 | * SELECT FOR UPDATE 25 | * SELECT DISTINCT ON 26 | * SET CONSTRAINTS 27 | * INDEX 28 | * DEFERRABLE INITIALLY DEFERRED 29 | * CONSTRAINT 30 | * CHECK 31 | * DROP DEFAULT 32 | 33 | To support migration: 34 | 35 | * To add a column to an existing table on Redshift, the column must be nullable (see the sketch below) 36 | * To modify a column: add a new column -> migrate the data -> drop the old column -> rename the new column 37 | 38 | Please note that migration support for Redshift is not perfect yet.
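For example, a migration following the add-column rule above might look like this minimal sketch (the app, model, and field names are hypothetical)::

    # myapp/migrations/0002_add_note.py (hypothetical)
    from django.db import migrations, models


    class Migration(migrations.Migration):

        dependencies = [
            ('myapp', '0001_initial'),
        ]

        operations = [
            migrations.AddField(
                model_name='mymodel',
                name='note',
                # Redshift can only append a nullable column to an
                # existing table, so null=True is required here.
                field=models.TextField(null=True),
            ),
        ]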
39 | 40 | 41 | Note and Limitation 42 | -------------------- 43 | 44 | Amazon Redshift doesn't support RETURNING, so the ``last_insert_id`` method retrieves MAX(pk) after insertion as a workaround. 45 | 46 | refs: 47 | 48 | * https://stackoverflow.com/q/19428860 49 | * https://stackoverflow.com/q/25638539 50 | 51 | In some cases, the MAX(pk) workaround does not work correctly. 52 | Bulk insertion can produce non-contiguous IDs such as 1, 4, 7, 10, ..., 53 | and a single insertion after such a bulk insertion can generate an unexpected id value such as 2 (the smallest unused id). 54 | 55 | 56 | Django Settings 57 | =============== 58 | 59 | settings.DATABASES 60 | -------------------- 61 | 62 | :ENGINE: 63 | Set 'django_redshift_backend'. 64 | 65 | :NAME: 66 | Set '<your database name>'. 67 | 68 | :USER: 69 | Set '<your database username>'. 70 | 71 | :PASSWORD: 72 | Set '<your database password>'. 73 | 74 | :HOST: 75 | Set '<your database hostname>'. 76 | 77 | :PORT: 78 | Set your Redshift server port number, usually '5439'. 79 | 80 | 81 | settings.REDSHIFT_VARCHAR_LENGTH_MULTIPLIER 82 | ------------------------------------------- 83 | 84 | Multiplier applied to declared VARCHAR lengths to support multi-byte UTF-8 strings. Default is 1. 85 | 86 | See also: https://docs.aws.amazon.com/redshift/latest/dg/r_Character_types.html#r_Character_types-storage-and-ranges 87 |
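For instance, assuming the multiplier simply scales each declared length (a sketch of the expected effect, not DDL captured from the backend)::

    # settings.py
    REDSHIFT_VARCHAR_LENGTH_MULTIPLIER = 3

    # models.py
    class Profile(models.Model):
        # max_length counts characters; with the multiplier above, the
        # emitted column should be varchar(300), leaving room for
        # 3-byte UTF-8 characters.
        name = models.CharField(max_length=100)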
88 | 89 | Django Models 90 | ============= 91 | 92 | Using sortkey 93 | ------------- 94 | 95 | There is built-in support for this option for Django >= 1.9. To use `sortkey`, define an `ordering` on the model 96 | meta with the custom sortkey type `django_redshift_backend.SortKey` as follows:: 97 | 98 | class MyModel(models.Model): 99 | ... 100 | 101 | class Meta: 102 | ordering = [SortKey('col2')] 103 | 104 | `SortKey` entries in `ordering` are also valid as ordering in Django. 105 | 106 | N.B.: there is no validation of this option; instead, we let Redshift validate it for you. Be sure to refer to the `documentation <https://docs.aws.amazon.com/redshift/latest/dg/t_Sorting_data.html>`_. 107 | 108 | Using distkey 109 | ------------- 110 | 111 | There is built-in support for this option for Django >= 1.11. To use `distkey`, define an index on the model 112 | meta with the custom index type `django_redshift_backend.DistKey` with `fields` naming a single field:: 113 | 114 | class MyModel(models.Model): 115 | ... 116 | 117 | class Meta: 118 | indexes = [DistKey(fields=['customer_id'])] 119 | 120 | Redshift doesn't have conventional indexes, and we don't generate SQL for them. We merely use 121 | `indexes` as a convenient place in the Meta to identify the `distkey`. 122 | 123 | You will likely encounter the following complication: 124 | 125 | Inlining Index Migrations 126 | ~~~~~~~~~~~~~~~~~~~~~~~~~ 127 | Django's `makemigrations` generates a migration file that first applies a `CreateModel` operation without the 128 | `indexes` option, and then adds the index in a separate `AddIndex` operation. 129 | 130 | However, Redshift requires that the `distkey` be specified at table creation. As a result, you may need to 131 | manually edit your migration files to move the index creation into the initial `CreateModel`. 132 | 133 | That is, to go from:: 134 | 135 | operations = [ 136 | ... 137 | migrations.CreateModel( 138 | name='FactTable', 139 | fields=[ 140 | ('distkeycol', models.CharField()), 141 | ('measure1', models.IntegerField()), 142 | ('measure2', models.IntegerField()) 143 | ... 144 | ] 145 | ), 146 | ... 147 | migrations.AddIndex( 148 | model_name='facttable', 149 | index=django_redshift_backend.DistKey(fields=['distkeycol'], name='...'), 150 | ), 151 | ] 152 | 153 | To:: 154 | 155 | operations = [ 156 | ... 157 | migrations.CreateModel( 158 | name='FactTable', 159 | fields=[ 160 | ('distkeycol', models.CharField()), 161 | ('measure1', models.IntegerField()), 162 | ('measure2', models.IntegerField()) 163 | ... 164 | ], 165 | options={ 166 | 'indexes': [django_redshift_backend.DistKey(fields=['distkeycol'], name='...')], 167 | }, 168 | ), 169 | ... 170 | ] 171 | 172 | 173 | Inlining ForeignKey Migrations 174 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 175 | It is common to distribute fact tables on a foreign key column referencing the primary key of a dimension table. 176 | 177 | In this case you may also encounter the following added complication: 178 | 179 | Django's `makemigrations` generates a migration file that first applies a `CreateModel` operation without the 180 | `ForeignKey` column, and then adds the `ForeignKey` column in a separate `AddField` operation. It does this to 181 | avoid attempts to create foreign key constraints against tables that haven't been created yet. 182 | 183 | However, Redshift requires that the `distkey` be specified at table creation. As a result, you may need to 184 | manually edit your migration files to move the ForeignKey column into the initial `CreateModel`, while also 185 | ensuring that the referenced table appears *before* the referencing table in the file. 186 | 187 | That is, to go from:: 188 | 189 | operations = [ 190 | ... 191 | migrations.CreateModel( 192 | name='FactTable', 193 | fields=[ 194 | ('measure1', models.IntegerField()), 195 | ('measure2', models.IntegerField()) 196 | ... 197 | ] 198 | ), 199 | ... 200 | migrations.CreateModel( 201 | name='Dimension1Table', 202 | fields=[ 203 | ... 204 | ] 205 | ), 206 | ... 207 | migrations.AddField( 208 | model_name='facttable', 209 | name='dim1', 210 | field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='myapp.Dimension1Table'), 211 | ), 212 | ... 213 | ] 214 | 215 | To:: 216 | 217 | operations = [ 218 | migrations.CreateModel( 219 | name='Dimension1Table', 220 | fields=[ 221 | ... 222 | ] 223 | ), 224 | ... 225 | migrations.CreateModel( 226 | name='FactTable', 227 | fields=[ 228 | ('measure1', models.IntegerField()), 229 | ('measure2', models.IntegerField()), 230 | ('dim1', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='myapp.Dimension1Table')) 231 | ... 232 | ] 233 | ), 234 | ...
235 | ] 236 | 237 | -------------------------------------------------------------------------------- /tests/test_inspectdb.py: -------------------------------------------------------------------------------- 1 | import os 2 | from io import StringIO 3 | from textwrap import dedent 4 | from unittest import mock 5 | import unittest 6 | 7 | from django.db import connections 8 | from django.core.management import call_command 9 | 10 | from test_base import OperationTestBase 11 | 12 | from conftest import skipif_no_database, postgres_fixture 13 | 14 | def norm_sql(sql): 15 | return ' '.join(sql.split()).replace('( ', '(').replace(' )', ')').replace(' ;', ';') 16 | 17 | 18 | class IntrospectionTest(unittest.TestCase): 19 | expected_table_description_metadata = norm_sql( 20 | u'''SELECT 21 | a.attname AS column_name, 22 | NOT (a.attnotnull OR (t.typtype = 'd' AND t.typnotnull)) AS is_nullable, 23 | pg_get_expr(ad.adbin, ad.adrelid) AS column_default 24 | FROM pg_attribute a 25 | LEFT JOIN pg_attrdef ad ON a.attrelid = ad.adrelid AND a.attnum = ad.adnum 26 | JOIN pg_type t ON a.atttypid = t.oid 27 | JOIN pg_class c ON a.attrelid = c.oid 28 | JOIN pg_namespace n ON c.relnamespace = n.oid 29 | WHERE c.relkind IN ('f', 'm', 'p', 'r', 'v') 30 | AND c.relname = %s 31 | AND n.nspname NOT IN ('pg_catalog', 'pg_toast') 32 | AND pg_catalog.pg_table_is_visible(c.oid) 33 | ''') 34 | 35 | expected_constraints_query = norm_sql( 36 | u''' SELECT 37 | c.conname, 38 | c.conkey::int[], 39 | c.conrelid, 40 | c.contype, 41 | (SELECT fkc.relname || '.' || fka.attname 42 | FROM pg_attribute AS fka 43 | JOIN pg_class AS fkc ON fka.attrelid = fkc.oid 44 | WHERE fka.attrelid = c.confrelid AND fka.attnum = c.confkey[1]) 45 | FROM pg_constraint AS c 46 | JOIN pg_class AS cl ON c.conrelid = cl.oid 47 | WHERE cl.relname = %s AND pg_catalog.pg_table_is_visible(cl.oid) 48 | ''') 49 | 50 | expected_attributes_query = norm_sql( 51 | u'''SELECT 52 | attrelid, -- table oid 53 | attnum, 54 | attname 55 | FROM pg_attribute 56 | WHERE pg_attribute.attrelid = %s 57 | ORDER BY attrelid, attnum; 58 | ''') 59 | 60 | expected_indexes_query = norm_sql( 61 | u'''SELECT 62 | c2.relname, 63 | idx.indrelid, 64 | idx.indkey, -- type "int2vector", returns space-separated string 65 | idx.indisunique, 66 | idx.indisprimary 67 | FROM 68 | pg_catalog.pg_class c, 69 | pg_catalog.pg_class c2, 70 | pg_catalog.pg_index idx 71 | WHERE 72 | c.oid = idx.indrelid 73 | AND idx.indexrelid = c2.oid 74 | AND c.relname = %s 75 | ''') 76 | 77 | def test_get_table_description_does_not_use_unsupported_functions(self): 78 | conn = connections['default'] 79 | with mock.patch.object(conn, 'cursor') as mock_cursor_method: 80 | mock_cursor = mock_cursor_method.return_value.__enter__.return_value 81 | from testapp.models import TestModel 82 | table_name = TestModel._meta.db_table 83 | 84 | _ = conn.introspection.get_table_description(mock_cursor, table_name) 85 | 86 | ( 87 | select_metadata_call, 88 | fetchall_call, 89 | select_row_call 90 | ) = mock_cursor.method_calls 91 | 92 | call_method, call_args, call_kwargs = select_metadata_call 93 | self.assertEqual('execute', call_method) 94 | executed_sql = norm_sql(call_args[0]) 95 | 96 | self.assertEqual(self.expected_table_description_metadata, executed_sql) 97 | 98 | self.assertNotIn('collation', executed_sql) 99 | self.assertNotIn('unnest', executed_sql) 100 | 101 | call_method, call_args, call_kwargs = select_row_call 102 | self.assertEqual( 103 | norm_sql('SELECT * FROM "testapp_testmodel" LIMIT 1'), 104 | 
call_args[0], 105 | ) 106 | 107 | def test_get_constraints_does_not_use_unsupported_functions(self): 108 | conn = connections['default'] 109 | with mock.patch.object(conn, 'cursor') as mock_cursor_method: 110 | mock_cursor = mock_cursor_method.return_value.__enter__.return_value 111 | from testapp.models import TestModel 112 | table_name = TestModel._meta.db_table 113 | 114 | mock_cursor.fetchall.side_effect = [ 115 | # conname, conkey, conrelid, contype, used_cols) 116 | [ 117 | ( 118 | 'testapp_testmodel_testapp_testmodel_id_pkey', 119 | [1], 120 | 12345678, 121 | 'p', 122 | None, 123 | ), 124 | ], 125 | [ 126 | # attrelid, attnum, attname 127 | (12345678, 1, 'id'), 128 | (12345678, 2, 'ctime'), 129 | (12345678, 3, 'text'), 130 | (12345678, 4, 'uuid'), 131 | ], 132 | # index_name, indrelid, indkey, unique, primary 133 | [ 134 | ( 135 | 'testapp_testmodel_testapp_testmodel_id_pkey', 136 | 12345678, 137 | '1', 138 | True, 139 | True, 140 | ), 141 | ], 142 | ] 143 | 144 | table_constraints = conn.introspection.get_constraints( 145 | mock_cursor, table_name) 146 | 147 | expected_table_constraints = { 148 | 'testapp_testmodel_testapp_testmodel_id_pkey': { 149 | 'columns': ['id'], 150 | 'primary_key': True, 151 | 'unique': True, 152 | 'foreign_key': None, 153 | 'check': False, 154 | 'index': False, 155 | 'definition': None, 156 | 'options': None, 157 | } 158 | } 159 | self.assertDictEqual(expected_table_constraints, table_constraints) 160 | 161 | calls = mock_cursor.method_calls 162 | 163 | # Should be a sequence of 3x execute and fetchall calls 164 | expected_call_sequence = ['execute', 'fetchall'] * 3 165 | actual_call_sequence = [name for (name, _args, _kwargs) in calls] 166 | self.assertEqual(expected_call_sequence, actual_call_sequence) 167 | 168 | # Constraints query 169 | call_method, call_args, call_kwargs = calls[0] 170 | executed_sql = norm_sql(call_args[0]) 171 | self.assertNotIn('collation', executed_sql) 172 | self.assertNotIn('unnest', executed_sql) 173 | self.assertEqual(self.expected_constraints_query, executed_sql) 174 | 175 | # Attributes query 176 | call_method, call_args, call_kwargs = calls[2] 177 | executed_sql = norm_sql(call_args[0]) 178 | self.assertNotIn('collation', executed_sql) 179 | self.assertNotIn('unnest', executed_sql) 180 | self.assertEqual(self.expected_attributes_query, executed_sql) 181 | 182 | # Indexes query 183 | call_method, call_args, call_kwargs = calls[4] 184 | executed_sql = norm_sql(call_args[0]) 185 | self.assertNotIn('collation', executed_sql) 186 | self.assertNotIn('unnest', executed_sql) 187 | self.assertEqual(self.expected_indexes_query, executed_sql) 188 | 189 | 190 | @skipif_no_database 191 | class InspectDbTests(OperationTestBase): 192 | available_apps = [] 193 | databases = {'default'} 194 | 195 | expected_pony_model = dedent(''' 196 | from django.db import models 197 | 198 | 199 | class TestPony(models.Model): 200 | pink = models.IntegerField() 201 | weight = models.FloatField() 202 | 203 | class Meta: 204 | managed = False 205 | db_table = 'test_pony' 206 | ''') 207 | 208 | def tearDown(self): 209 | self.cleanup_test_tables() 210 | 211 | @postgres_fixture() 212 | def test_inspectdb(self): 213 | self.set_up_test_model('test') 214 | out = StringIO() 215 | call_command('inspectdb', 'test_pony', stdout=out) 216 | print(out.getvalue()) 217 | self.assertIn(self.expected_pony_model, out.getvalue()) 218 | --------------------------------------------------------------------------------
/django_redshift_backend/_vendor/django40/db/backends/base/introspection.py: -------------------------------------------------------------------------------- 1 | from collections import namedtuple 2 | 3 | # Structure returned by DatabaseIntrospection.get_table_list() 4 | TableInfo = namedtuple("TableInfo", ["name", "type"]) 5 | 6 | # Structure returned by the DB-API cursor.description interface (PEP 249) 7 | FieldInfo = namedtuple( 8 | "FieldInfo", 9 | "name type_code display_size internal_size precision scale null_ok " 10 | "default collation", 11 | ) 12 | 13 | 14 | class BaseDatabaseIntrospection: 15 | """Encapsulate backend-specific introspection utilities.""" 16 | 17 | data_types_reverse = {} 18 | 19 | def __init__(self, connection): 20 | self.connection = connection 21 | 22 | def get_field_type(self, data_type, description): 23 | """ 24 | Hook for a database backend to use the cursor description to 25 | match a Django field type to a database column. 26 | 27 | For Oracle, the column data_type on its own is insufficient to 28 | distinguish between a FloatField and IntegerField, for example. 29 | """ 30 | return self.data_types_reverse[data_type] 31 | 32 | def identifier_converter(self, name): 33 | """ 34 | Apply a conversion to the identifier for the purposes of comparison. 35 | 36 | The default identifier converter is for case sensitive comparison. 37 | """ 38 | return name 39 | 40 | def table_names(self, cursor=None, include_views=False): 41 | """ 42 | Return a list of names of all tables that exist in the database. 43 | Sort the returned table list by Python's default sorting. Do NOT use 44 | the database's ORDER BY here to avoid subtle differences in sorting 45 | order between databases. 46 | """ 47 | 48 | def get_names(cursor): 49 | return sorted( 50 | ti.name 51 | for ti in self.get_table_list(cursor) 52 | if include_views or ti.type == "t" 53 | ) 54 | 55 | if cursor is None: 56 | with self.connection.cursor() as cursor: 57 | return get_names(cursor) 58 | return get_names(cursor) 59 | 60 | def get_table_list(self, cursor): 61 | """ 62 | Return an unsorted list of TableInfo named tuples of all tables and 63 | views that exist in the database. 64 | """ 65 | raise NotImplementedError( 66 | "subclasses of BaseDatabaseIntrospection may require a get_table_list() " 67 | "method" 68 | ) 69 | 70 | def get_table_description(self, cursor, table_name): 71 | """ 72 | Return a description of the table with the DB-API cursor.description 73 | interface. 74 | """ 75 | raise NotImplementedError( 76 | "subclasses of BaseDatabaseIntrospection may require a " 77 | "get_table_description() method." 78 | ) 79 | 80 | def get_migratable_models(self): 81 | from django.apps import apps 82 | from django.db import router 83 | 84 | return ( 85 | model 86 | for app_config in apps.get_app_configs() 87 | for model in router.get_migratable_models(app_config, self.connection.alias) 88 | if model._meta.can_migrate(self.connection) 89 | ) 90 | 91 | def django_table_names(self, only_existing=False, include_views=True): 92 | """ 93 | Return a list of all table names that have associated Django models and 94 | are in INSTALLED_APPS. 95 | 96 | If only_existing is True, include only the tables in the database. 
97 | """ 98 | tables = set() 99 | for model in self.get_migratable_models(): 100 | if not model._meta.managed: 101 | continue 102 | tables.add(model._meta.db_table) 103 | tables.update( 104 | f.m2m_db_table() 105 | for f in model._meta.local_many_to_many 106 | if f.remote_field.through._meta.managed 107 | ) 108 | tables = list(tables) 109 | if only_existing: 110 | existing_tables = set(self.table_names(include_views=include_views)) 111 | tables = [ 112 | t for t in tables if self.identifier_converter(t) in existing_tables 113 | ] 114 | return tables 115 | 116 | def installed_models(self, tables): 117 | """ 118 | Return a set of all models represented by the provided list of table 119 | names. 120 | """ 121 | tables = set(map(self.identifier_converter, tables)) 122 | return { 123 | m 124 | for m in self.get_migratable_models() 125 | if self.identifier_converter(m._meta.db_table) in tables 126 | } 127 | 128 | def sequence_list(self): 129 | """ 130 | Return a list of information about all DB sequences for all models in 131 | all apps. 132 | """ 133 | sequence_list = [] 134 | with self.connection.cursor() as cursor: 135 | for model in self.get_migratable_models(): 136 | if not model._meta.managed: 137 | continue 138 | if model._meta.swapped: 139 | continue 140 | sequence_list.extend( 141 | self.get_sequences( 142 | cursor, model._meta.db_table, model._meta.local_fields 143 | ) 144 | ) 145 | for f in model._meta.local_many_to_many: 146 | # If this is an m2m using an intermediate table, 147 | # we don't need to reset the sequence. 148 | if f.remote_field.through._meta.auto_created: 149 | sequence = self.get_sequences(cursor, f.m2m_db_table()) 150 | sequence_list.extend( 151 | sequence or [{"table": f.m2m_db_table(), "column": None}] 152 | ) 153 | return sequence_list 154 | 155 | def get_sequences(self, cursor, table_name, table_fields=()): 156 | """ 157 | Return a list of introspected sequences for table_name. Each sequence 158 | is a dict: {'table': , 'column': }. An optional 159 | 'name' key can be added if the backend supports named sequences. 160 | """ 161 | raise NotImplementedError( 162 | "subclasses of BaseDatabaseIntrospection may require a get_sequences() " 163 | "method" 164 | ) 165 | 166 | def get_relations(self, cursor, table_name): 167 | """ 168 | Return a dictionary of 169 | {field_name: (field_name_other_table, other_table)} representing all 170 | relationships to the given table. 171 | """ 172 | raise NotImplementedError( 173 | "subclasses of BaseDatabaseIntrospection may require a " 174 | "get_relations() method." 175 | ) 176 | 177 | def get_key_columns(self, cursor, table_name): 178 | """ 179 | Backends can override this to return a list of: 180 | (column_name, referenced_table_name, referenced_column_name) 181 | for all key columns in given table. 182 | """ 183 | raise NotImplementedError( 184 | "subclasses of BaseDatabaseIntrospection may require a get_key_columns() " 185 | "method" 186 | ) 187 | 188 | def get_primary_key_column(self, cursor, table_name): 189 | """ 190 | Return the name of the primary key column for the given table. 191 | """ 192 | for constraint in self.get_constraints(cursor, table_name).values(): 193 | if constraint["primary_key"]: 194 | return constraint["columns"][0] 195 | return None 196 | 197 | def get_constraints(self, cursor, table_name): 198 | """ 199 | Retrieve any constraints or keys (unique, pk, fk, check, index) 200 | across one or more columns. 
201 | 202 | Return a dict mapping constraint names to their attributes, 203 | where attributes is a dict with keys: 204 | * columns: List of columns this covers 205 | * primary_key: True if primary key, False otherwise 206 | * unique: True if this is a unique constraint, False otherwise 207 | * foreign_key: (table, column) of target, or None 208 | * check: True if check constraint, False otherwise 209 | * index: True if index, False otherwise. 210 | * orders: The order (ASC/DESC) defined for the columns of indexes 211 | * type: The type of the index (btree, hash, etc.) 212 | 213 | Some backends may return special constraint names that don't exist 214 | if they don't name constraints of a certain type (e.g. SQLite) 215 | """ 216 | raise NotImplementedError( 217 | "subclasses of BaseDatabaseIntrospection may require a get_constraints() " 218 | "method" 219 | ) 220 | -------------------------------------------------------------------------------- /django_redshift_backend/_vendor/django40/db/backends/ddl_references.py: -------------------------------------------------------------------------------- 1 | """ 2 | Helpers to manipulate deferred DDL statements that might need to be adjusted or 3 | discarded when executing a migration. 4 | """ 5 | from copy import deepcopy 6 | 7 | 8 | class Reference: 9 | """Base class that defines the reference interface.""" 10 | 11 | def references_table(self, table): 12 | """ 13 | Return whether or not this instance references the specified table. 14 | """ 15 | return False 16 | 17 | def references_column(self, table, column): 18 | """ 19 | Return whether or not this instance references the specified column. 20 | """ 21 | return False 22 | 23 | def rename_table_references(self, old_table, new_table): 24 | """ 25 | Rename all references to the old_table to the new_table. 26 | """ 27 | pass 28 | 29 | def rename_column_references(self, table, old_column, new_column): 30 | """ 31 | Rename all references to the old_column to the new_column. 32 | """ 33 | pass 34 | 35 | def __repr__(self): 36 | return "<%s %r>" % (self.__class__.__name__, str(self)) 37 | 38 | def __str__(self): 39 | raise NotImplementedError( 40 | "Subclasses must define how they should be converted to string."
41 | ) 42 | 43 | 44 | class Table(Reference): 45 | """Hold a reference to a table.""" 46 | 47 | def __init__(self, table, quote_name): 48 | self.table = table 49 | self.quote_name = quote_name 50 | 51 | def references_table(self, table): 52 | return self.table == table 53 | 54 | def rename_table_references(self, old_table, new_table): 55 | if self.table == old_table: 56 | self.table = new_table 57 | 58 | def __str__(self): 59 | return self.quote_name(self.table) 60 | 61 | 62 | class TableColumns(Table): 63 | """Base class for references to multiple columns of a table.""" 64 | 65 | def __init__(self, table, columns): 66 | self.table = table 67 | self.columns = columns 68 | 69 | def references_column(self, table, column): 70 | return self.table == table and column in self.columns 71 | 72 | def rename_column_references(self, table, old_column, new_column): 73 | if self.table == table: 74 | for index, column in enumerate(self.columns): 75 | if column == old_column: 76 | self.columns[index] = new_column 77 | 78 | 79 | class Columns(TableColumns): 80 | """Hold a reference to one or many columns.""" 81 | 82 | def __init__(self, table, columns, quote_name, col_suffixes=()): 83 | self.quote_name = quote_name 84 | self.col_suffixes = col_suffixes 85 | super().__init__(table, columns) 86 | 87 | def __str__(self): 88 | def col_str(column, idx): 89 | col = self.quote_name(column) 90 | try: 91 | suffix = self.col_suffixes[idx] 92 | if suffix: 93 | col = "{} {}".format(col, suffix) 94 | except IndexError: 95 | pass 96 | return col 97 | 98 | return ", ".join( 99 | col_str(column, idx) for idx, column in enumerate(self.columns) 100 | ) 101 | 102 | 103 | class IndexName(TableColumns): 104 | """Hold a reference to an index name.""" 105 | 106 | def __init__(self, table, columns, suffix, create_index_name): 107 | self.suffix = suffix 108 | self.create_index_name = create_index_name 109 | super().__init__(table, columns) 110 | 111 | def __str__(self): 112 | return self.create_index_name(self.table, self.columns, self.suffix) 113 | 114 | 115 | class IndexColumns(Columns): 116 | def __init__(self, table, columns, quote_name, col_suffixes=(), opclasses=()): 117 | self.opclasses = opclasses 118 | super().__init__(table, columns, quote_name, col_suffixes) 119 | 120 | def __str__(self): 121 | def col_str(column, idx): 122 | # Index.__init__() guarantees that self.opclasses is the same 123 | # length as self.columns. 
124 | col = "{} {}".format(self.quote_name(column), self.opclasses[idx]) 125 | try: 126 | suffix = self.col_suffixes[idx] 127 | if suffix: 128 | col = "{} {}".format(col, suffix) 129 | except IndexError: 130 | pass 131 | return col 132 | 133 | return ", ".join( 134 | col_str(column, idx) for idx, column in enumerate(self.columns) 135 | ) 136 | 137 | 138 | class ForeignKeyName(TableColumns): 139 | """Hold a reference to a foreign key name.""" 140 | 141 | def __init__( 142 | self, 143 | from_table, 144 | from_columns, 145 | to_table, 146 | to_columns, 147 | suffix_template, 148 | create_fk_name, 149 | ): 150 | self.to_reference = TableColumns(to_table, to_columns) 151 | self.suffix_template = suffix_template 152 | self.create_fk_name = create_fk_name 153 | super().__init__( 154 | from_table, 155 | from_columns, 156 | ) 157 | 158 | def references_table(self, table): 159 | return super().references_table(table) or self.to_reference.references_table( 160 | table 161 | ) 162 | 163 | def references_column(self, table, column): 164 | return super().references_column( 165 | table, column 166 | ) or self.to_reference.references_column(table, column) 167 | 168 | def rename_table_references(self, old_table, new_table): 169 | super().rename_table_references(old_table, new_table) 170 | self.to_reference.rename_table_references(old_table, new_table) 171 | 172 | def rename_column_references(self, table, old_column, new_column): 173 | super().rename_column_references(table, old_column, new_column) 174 | self.to_reference.rename_column_references(table, old_column, new_column) 175 | 176 | def __str__(self): 177 | suffix = self.suffix_template % { 178 | "to_table": self.to_reference.table, 179 | "to_column": self.to_reference.columns[0], 180 | } 181 | return self.create_fk_name(self.table, self.columns, suffix) 182 | 183 | 184 | class Statement(Reference): 185 | """ 186 | Statement template and formatting parameters container. 
187 | 188 | Allows keeping a reference to a statement without interpolating identifiers 189 | that might have to be adjusted if they're referencing a table or column 190 | that is removed 191 | """ 192 | 193 | def __init__(self, template, **parts): 194 | self.template = template 195 | self.parts = parts 196 | 197 | def references_table(self, table): 198 | return any( 199 | hasattr(part, "references_table") and part.references_table(table) 200 | for part in self.parts.values() 201 | ) 202 | 203 | def references_column(self, table, column): 204 | return any( 205 | hasattr(part, "references_column") and part.references_column(table, column) 206 | for part in self.parts.values() 207 | ) 208 | 209 | def rename_table_references(self, old_table, new_table): 210 | for part in self.parts.values(): 211 | if hasattr(part, "rename_table_references"): 212 | part.rename_table_references(old_table, new_table) 213 | 214 | def rename_column_references(self, table, old_column, new_column): 215 | for part in self.parts.values(): 216 | if hasattr(part, "rename_column_references"): 217 | part.rename_column_references(table, old_column, new_column) 218 | 219 | def __str__(self): 220 | return self.template % self.parts 221 | 222 | 223 | class Expressions(TableColumns): 224 | def __init__(self, table, expressions, compiler, quote_value): 225 | self.compiler = compiler 226 | self.expressions = expressions 227 | self.quote_value = quote_value 228 | columns = [ 229 | col.target.column 230 | for col in self.compiler.query._gen_cols([self.expressions]) 231 | ] 232 | super().__init__(table, columns) 233 | 234 | def rename_table_references(self, old_table, new_table): 235 | if self.table != old_table: 236 | return 237 | self.expressions = self.expressions.relabeled_clone({old_table: new_table}) 238 | super().rename_table_references(old_table, new_table) 239 | 240 | def rename_column_references(self, table, old_column, new_column): 241 | if self.table != table: 242 | return 243 | expressions = deepcopy(self.expressions) 244 | self.columns = [] 245 | for col in self.compiler.query._gen_cols([expressions]): 246 | if col.target.column == old_column: 247 | col.target.column = new_column 248 | self.columns.append(col.target.column) 249 | self.expressions = expressions 250 | 251 | def __str__(self): 252 | sql, params = self.compiler.compile(self.expressions) 253 | params = map(self.quote_value, params) 254 | return sql % tuple(params) 255 | -------------------------------------------------------------------------------- /django_redshift_backend/_vendor/django40/db/backends/utils.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import decimal 3 | import functools 4 | import hashlib 5 | import logging 6 | import time 7 | from contextlib import contextmanager 8 | 9 | from django.db import NotSupportedError 10 | from django.utils.dateparse import parse_time 11 | 12 | logger = logging.getLogger("django.db.backends") 13 | 14 | 15 | class CursorWrapper: 16 | def __init__(self, cursor, db): 17 | self.cursor = cursor 18 | self.db = db 19 | 20 | WRAP_ERROR_ATTRS = frozenset(["fetchone", "fetchmany", "fetchall", "nextset"]) 21 | 22 | def __getattr__(self, attr): 23 | cursor_attr = getattr(self.cursor, attr) 24 | if attr in CursorWrapper.WRAP_ERROR_ATTRS: 25 | return self.db.wrap_database_errors(cursor_attr) 26 | else: 27 | return cursor_attr 28 | 29 | def __iter__(self): 30 | with self.db.wrap_database_errors: 31 | yield from self.cursor 32 | 33 | def __enter__(self): 34 | 
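# Illustrative usage of the context-manager protocol implemented here: "with connection.cursor() as cursor: cursor.execute('SELECT 1')" -- __exit__() below closes the cursor instead of leaving it open.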
return self 35 | 36 | def __exit__(self, type, value, traceback): 37 | # Close instead of passing through to avoid backend-specific behavior 38 | # (#17671). Catch errors liberally because errors in cleanup code 39 | # aren't useful. 40 | try: 41 | self.close() 42 | except self.db.Database.Error: 43 | pass 44 | 45 | # The following methods cannot be implemented in __getattr__, because the 46 | # code must run when the method is invoked, not just when it is accessed. 47 | 48 | def callproc(self, procname, params=None, kparams=None): 49 | # Keyword parameters for callproc aren't supported in PEP 249, but the 50 | # database driver may support them (e.g. cx_Oracle). 51 | if kparams is not None and not self.db.features.supports_callproc_kwargs: 52 | raise NotSupportedError( 53 | "Keyword parameters for callproc are not supported on this " 54 | "database backend." 55 | ) 56 | self.db.validate_no_broken_transaction() 57 | with self.db.wrap_database_errors: 58 | if params is None and kparams is None: 59 | return self.cursor.callproc(procname) 60 | elif kparams is None: 61 | return self.cursor.callproc(procname, params) 62 | else: 63 | params = params or () 64 | return self.cursor.callproc(procname, params, kparams) 65 | 66 | def execute(self, sql, params=None): 67 | return self._execute_with_wrappers( 68 | sql, params, many=False, executor=self._execute 69 | ) 70 | 71 | def executemany(self, sql, param_list): 72 | return self._execute_with_wrappers( 73 | sql, param_list, many=True, executor=self._executemany 74 | ) 75 | 76 | def _execute_with_wrappers(self, sql, params, many, executor): 77 | context = {"connection": self.db, "cursor": self} 78 | for wrapper in reversed(self.db.execute_wrappers): 79 | executor = functools.partial(wrapper, executor) 80 | return executor(sql, params, many, context) 81 | 82 | def _execute(self, sql, params, *ignored_wrapper_args): 83 | self.db.validate_no_broken_transaction() 84 | with self.db.wrap_database_errors: 85 | if params is None: 86 | # params default might be backend specific. 87 | return self.cursor.execute(sql) 88 | else: 89 | return self.cursor.execute(sql, params) 90 | 91 | def _executemany(self, sql, param_list, *ignored_wrapper_args): 92 | self.db.validate_no_broken_transaction() 93 | with self.db.wrap_database_errors: 94 | return self.cursor.executemany(sql, param_list) 95 | 96 | 97 | class CursorDebugWrapper(CursorWrapper): 98 | # XXX callproc isn't instrumented at this time. 99 | 100 | def execute(self, sql, params=None): 101 | with self.debug_sql(sql, params, use_last_executed_query=True): 102 | return super().execute(sql, params) 103 | 104 | def executemany(self, sql, param_list): 105 | with self.debug_sql(sql, param_list, many=True): 106 | return super().executemany(sql, param_list) 107 | 108 | @contextmanager 109 | def debug_sql( 110 | self, sql=None, params=None, use_last_executed_query=False, many=False 111 | ): 112 | start = time.monotonic() 113 | try: 114 | yield 115 | finally: 116 | stop = time.monotonic() 117 | duration = stop - start 118 | if use_last_executed_query: 119 | sql = self.db.ops.last_executed_query(self.cursor, sql, params) 120 | try: 121 | times = len(params) if many else "" 122 | except TypeError: 123 | # params could be an iterator. 124 | times = "?" 
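# Each queries_log entry pairs the rendered SQL with its duration, e.g. {"sql": "SELECT 1", "time": "0.002"}; for executemany the SQL is prefixed with the number of parameter sets ("3 times: ..."). Values here are illustrative.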
125 | self.db.queries_log.append( 126 | { 127 | "sql": "%s times: %s" % (times, sql) if many else sql, 128 | "time": "%.3f" % duration, 129 | } 130 | ) 131 | logger.debug( 132 | "(%.3f) %s; args=%s; alias=%s", 133 | duration, 134 | sql, 135 | params, 136 | self.db.alias, 137 | extra={ 138 | "duration": duration, 139 | "sql": sql, 140 | "params": params, 141 | "alias": self.db.alias, 142 | }, 143 | ) 144 | 145 | 146 | def split_tzname_delta(tzname): 147 | """ 148 | Split a time zone name into a 3-tuple of (name, sign, offset). 149 | """ 150 | for sign in ["+", "-"]: 151 | if sign in tzname: 152 | name, offset = tzname.rsplit(sign, 1) 153 | if offset and parse_time(offset): 154 | return name, sign, offset 155 | return tzname, None, None 156 | 157 | 158 | ############################################### 159 | # Converters from database (string) to Python # 160 | ############################################### 161 | 162 | 163 | def typecast_date(s): 164 | return ( 165 | datetime.date(*map(int, s.split("-"))) if s else None 166 | ) # return None if s is null 167 | 168 | 169 | def typecast_time(s): # does NOT store time zone information 170 | if not s: 171 | return None 172 | hour, minutes, seconds = s.split(":") 173 | if "." in seconds: # check whether seconds have a fractional part 174 | seconds, microseconds = seconds.split(".") 175 | else: 176 | microseconds = "0" 177 | return datetime.time( 178 | int(hour), int(minutes), int(seconds), int((microseconds + "000000")[:6]) 179 | ) 180 | 181 | 182 | def typecast_timestamp(s): # does NOT store time zone information 183 | # "2005-07-29 15:48:00.590358-05" 184 | # "2005-07-29 09:56:00-05" 185 | if not s: 186 | return None 187 | if " " not in s: 188 | return typecast_date(s) 189 | d, t = s.split() 190 | # Remove timezone information. 191 | if "-" in t: 192 | t, _ = t.split("-", 1) 193 | elif "+" in t: 194 | t, _ = t.split("+", 1) 195 | dates = d.split("-") 196 | times = t.split(":") 197 | seconds = times[2] 198 | if "." in seconds: # check whether seconds have a fractional part 199 | seconds, microseconds = seconds.split(".") 200 | else: 201 | microseconds = "0" 202 | return datetime.datetime( 203 | int(dates[0]), 204 | int(dates[1]), 205 | int(dates[2]), 206 | int(times[0]), 207 | int(times[1]), 208 | int(seconds), 209 | int((microseconds + "000000")[:6]), 210 | ) 211 | 212 | 213 | ############################################### 214 | # Converters from Python to database (string) # 215 | ############################################### 216 | 217 | 218 | def split_identifier(identifier): 219 | """ 220 | Split an SQL identifier into a two element tuple of (namespace, name). 221 | 222 | The identifier could be a table, column, or sequence name might be prefixed 223 | by a namespace. 224 | """ 225 | try: 226 | namespace, name = identifier.split('"."') 227 | except ValueError: 228 | namespace, name = "", identifier 229 | return namespace.strip('"'), name.strip('"') 230 | 231 | 232 | def truncate_name(identifier, length=None, hash_len=4): 233 | """ 234 | Shorten an SQL identifier to a repeatable mangled version with the given 235 | length. 236 | 237 | If a quote stripped name contains a namespace, e.g. USERNAME"."TABLE, 238 | truncate the table portion only. 
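For example (illustrative), with length=10 and hash_len=4 the table portion of USERNAME"."VERY_LONG_TABLE keeps its first 6 characters and gains a 4-character digest of the full table name.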
239 | """ 240 | namespace, name = split_identifier(identifier) 241 | 242 | if length is None or len(name) <= length: 243 | return identifier 244 | 245 | digest = names_digest(name, length=hash_len) 246 | return "%s%s%s" % ( 247 | '%s"."' % namespace if namespace else "", 248 | name[: length - hash_len], 249 | digest, 250 | ) 251 | 252 | 253 | def names_digest(*args, length): 254 | """ 255 | Generate a 32-bit digest of a set of arguments that can be used to shorten 256 | identifying names. 257 | """ 258 | h = hashlib.md5() 259 | for arg in args: 260 | h.update(arg.encode()) 261 | return h.hexdigest()[:length] 262 | 263 | 264 | def format_number(value, max_digits, decimal_places): 265 | """ 266 | Format a number into a string with the requisite number of digits and 267 | decimal places. 268 | """ 269 | if value is None: 270 | return None 271 | context = decimal.getcontext().copy() 272 | if max_digits is not None: 273 | context.prec = max_digits 274 | if decimal_places is not None: 275 | value = value.quantize( 276 | decimal.Decimal(1).scaleb(-decimal_places), context=context 277 | ) 278 | else: 279 | context.traps[decimal.Rounded] = 1 280 | value = context.create_decimal(value) 281 | return "{:f}".format(value) 282 | 283 | 284 | def strip_quotes(table_name): 285 | """ 286 | Strip quotes off of quoted table names to make them safe for use in index 287 | names, sequence names, etc. For example '"USER"."TABLE"' (an Oracle naming 288 | scheme) becomes 'USER"."TABLE'. 289 | """ 290 | has_quotes = table_name.startswith('"') and table_name.endswith('"') 291 | return table_name[1:-1] if has_quotes else table_name 292 | -------------------------------------------------------------------------------- /CHANGES.rst: -------------------------------------------------------------------------------- 1 | CHANGES 2 | ======= 3 | 4 | 5.1.0 (unreleased) 5 | ------------------ 6 | 7 | General: 8 | 9 | Features: 10 | 11 | Bug Fixes: 12 | 13 | 5.0.0 (2024/11/28) 14 | ------------------ 15 | 16 | General: 17 | 18 | Features: 19 | 20 | * #144 Add Python-3.13 support. 21 | * #141 Drop Django-3.2, 4.0 support. 22 | * #142 Drop Python-3.8 support. 23 | 24 | Bug Fixes: 25 | 26 | 4.2.0 (2024/10/30) 27 | ------------------ 28 | 29 | General: 30 | 31 | * #149 Added a clear message on ImportError when the psycopg2 32 | package cannot be found. 33 | Please refer to the following site for more information: 34 | https://django-redshift-backend.readthedocs.io/en/master/basic.html#installation 35 | 36 | Features: 37 | 38 | * #143 Add Django-5.0 support. 39 | * #152 Add Django-5.1 support. 40 | 41 | Bug Fixes: 42 | 43 | * Fixes #12, #154: disable native JSON support. Redshift does not natively support JSON. 44 | 45 | 4.1.1 (2024/08/20) 46 | ------------------ 47 | 48 | Bug Fixes: 49 | 50 | * #147 Fix broken django.db.backends.signals.connection_created signal. 51 | 52 | 4.1.0 (2024/07/27) 53 | ------------------ 54 | 55 | Features: 56 | 57 | * #140 Add Python-3.11 and 3.12 support. Thanks to Grzegorz Śliwiński. 58 | 59 | 4.0.0 (2024/07/23) 60 | ------------------ 61 | 62 | General: 63 | 64 | Incompatible Changes: 65 | 66 | Features: 67 | 68 | * #116 Add Django-4.2 support. 69 | Special thanks to Grzegorz Śliwiński, who made a significant contribution to the development of Django-4.1 support in PR #111. Using this as a springboard, we have now made it possible to support Django-4.2. 70 | * #83 Drop Django-2.2 support. 71 | * #83 Drop Python-3.6 support. 72 | * #127 Drop Python-3.7 support. 73 |
74 | * #134 Support adding COLUMN with UNIQUE; the column is added without UNIQUE first, then a UNIQUE CONSTRAINT is added. 75 | * #135 Support adding BinaryField. 76 | * #132 Use length 36 for UUIDFields to support values including hyphens. Thanks to kylie. 77 | 78 | Bug Fixes: 79 | 80 | * #134 inspectdb now suppresses the output 'id = AutoField(primary_key=True)'. 81 | * #134 fix decreasing the size of a column with a default by using a create-copy-drop-rename strategy. 82 | * #118 fix constraint creation using the wrong table and column name. Thanks to BlueMagma. 83 | 84 | 3.0.0 (2022/02/27) 85 | ------------------ 86 | 87 | General: 88 | 89 | * #87 Drop py2 wheel tag from release package file. 90 | * Add `CODE_OF_CONDUCT.rst`. The linked text referred to from CONTRIBUTING.rst is now included. 91 | 92 | Incompatible Changes: 93 | 94 | * #97 To specify SORTKEY for Redshift, you must use `django_redshift_backend.SortKey` for 95 | `Model.Meta.ordering` instead of a bare string. 96 | 97 | **IMPORTANT**: 98 | With this change, existing migration files that specify ordering are not affected. 99 | If you want to apply SortKey to your migration files, please comment out the ordering option once and run 100 | makemigrations, then comment the ordering option back in and run makemigrations again. 101 | 102 | * #97 `django_redshift_backend.distkey.DistKey` is moved to `django_redshift_backend.DistKey`. 103 | However, the old name is still supported for compatibility. 104 | 105 | * #97 Now django-redshift-backend doesn't support `can_rollback_ddl`. 106 | Originally, Redshift did not support column name/type(size) changes within a transaction. 107 | Please refer to https://github.com/jazzband/django-redshift-backend/issues/96 108 | 109 | * #97 Changed the behavior of implicit NOT NULL column addition. 110 | Previously, adding a NOT NULL column was implicitly changed to allow NULL. 111 | Now, adding NOT NULL without DEFAULT raises a ProgrammingError exception. 112 | 113 | Features: 114 | 115 | * #82 Add Python-3.10 support. 116 | * #98 Add Django-4.0 support. 117 | * #82 Drop Django-3.0 support. 118 | * #98 Drop Django-3.1 support. 119 | * #90,#13,#8: Support `manage.py inspectdb`, also support working with the django-sql-explorer package. 120 | Thanks to Matt Fisher. 121 | * #63 Support changing a field from NOT NULL to NULL on migrate / sqlmigrate. 122 | * #97 Support VARCHAR size changing for UNIQUE, PRIMARY KEY, FOREIGN KEY. 123 | * #97 Support backward migration for DROP NOT NULL column without DEFAULT. 124 | One limitation is that the DEFAULT value is set to match the type. This is because the only way for 125 | Redshift to add NOT NULL without default is to recreate the table. 126 | 127 | Bug Fixes: 128 | 129 | * #92,#93: sqlmigrate (and migrate) did not work since django-3.0. 130 | * #37: fix Django `contenttype` migration that causes `ProgrammingError: cannot drop sortkey column 131 | "name"` exception. 132 | * #64: fix Django `auth` migration that causes `NotSupportedError: column "content_type__app_label" 133 | specified as distkey/sortkey is not in the table "auth_permission"` exception. 134 | 135 | 2.1.0 (2021/09/23) 136 | ------------------ 137 | 138 | General: 139 | 140 | * #76 fix test failures on django-dev with py36, py37. 141 | * #77 Modernize setup.cfg and pyproject.toml. 142 | 143 | Features: 144 | 145 | * #81 Add Django 3.2 support. 146 | 147 | Bug Fixes: 148 | 149 | * #80 UUID field doesn't work correctly with django 2.x and 3.x. Thanks to xavier-lr.
150 | 151 | 2.0.1 (2021/03/07) 152 | ------------------ 153 | 154 | Bug Fixes: 155 | 156 | * #74: set supports_aggregate_filter_clause=False (since Django-2.0) to disable FILTER WHERE syntax. Thanks to Michael Wheeler. 157 | * #73: fix broken feature flags since Django-3.0: can_return_columns_from_insert and can_return_rows_from_bulk_insert. Thanks to Agustín Magaña. 158 | 159 | 2.0.0 (2021/01/04) 160 | ------------------ 161 | 162 | General: 163 | 164 | * #70,#71,#72 Moved CI to GitHub Actions: https://github.com/jazzband/django-redshift-backend/actions 165 | Thanks to Bruno Alla. 166 | 167 | Features: 168 | 169 | * Drop Python 2.7 and 3.5 support. 170 | * Drop Django 1.11, 2.0 and 2.1 support. 171 | * #68 Add Python 3.8 and 3.9 support. 172 | * #68 Add Django 3.0 and 3.1 support. 173 | 174 | Bug Fixes: 175 | 176 | * #69 Let users choose between psycopg2 binary or source. Thanks to Bruno Alla. 177 | * #65,#66 Fix deprecation warnings due to invalid escape sequences. Thanks to Karthikeyan Singaravelan. 178 | 179 | Documentation: 180 | 181 | * #67 Just a typo cleanup from refs.rst. Thanks to Kostja P. 182 | 183 | 1.1.0 (2019/08/02) 184 | ------------------ 185 | 186 | * #60 Change dependencies to support Python 3.7. Thanks to Asher Foa. 187 | 188 | 1.0.0 (2019/01/29) 189 | ------------------ 190 | 191 | General: 192 | 193 | * The first release from the Jazzband_ organization. 194 | * Using `Development Status :: 5 - Production/Stable`. 195 | * All GitHub/Travis/other URLs in this product have been migrated to `/jazzband/`. 196 | 197 | New Features: 198 | 199 | * #56 Support Django 2.1. 200 | * #57 Support Python 3.7. 201 | 202 | Bug Fixes: 203 | 204 | * #53,#54: UUIDField Django model field causes a clash. Thanks to Corentin Dupret. 205 | 206 | Development: 207 | 208 | * Adopt setuptools_scm for versioning from git tag. 209 | 210 | .. _Jazzband: https://jazzband.co/ 211 | 212 | 0.9.1 (2018-09-29) 213 | ------------------ 214 | 215 | * Fix trove classifier 'License' from BSD to Apache. 216 | * Documentation: Add `Contribution Guideline`_ 217 | 218 | .. _Contribution Guideline: https://django-redshift-backend.readthedocs.io/en/master/dev.html#contribution-guideline 219 | 220 | 0.9 (2018-07-24) 221 | ---------------- 222 | 223 | * #35: Drop support for Django 1.8, 1.9 and 1.10. 224 | * #40: Support Django 2.0. 225 | * #42: Support DISTKEY. Thanks to Benjy Weinberger. 226 | * Documentation: https://django-redshift-backend.rtfd.io/ 227 | * Change LICENSE from 'BSD License' to 'Apache Software License'. 228 | 229 | 0.8.1 (2018-06-19) 230 | ------------------ 231 | 232 | * #38: Fix 0.8 not being compatible with Python 2. Thanks to Benjy Weinberger. 233 | 234 | 0.8 (2018-06-01) 235 | ---------------- 236 | 237 | Incompatible Changes: 238 | 239 | * #23,#10: Redshift supports time zones in time stamps for migration. 240 | 241 | **IMPORTANT**: 242 | With this change, the newly created DateTimeField column will be timestamp 243 | with timezone (TIMESTAMPTZ) by migration. Therefore, the existing 244 | DateTimeField and the new DateTimeField will have different data types as a 245 | redshift schema column type. 246 | There is no migration feature in django-redshift-backend. 247 | See also: https://github.com/jazzband/django-redshift-backend/pull/23 248 | 249 | New Features: 250 | 251 | * #20,#26: Support for sortkey. Thanks to Maxime Vdb and Kosei Kitahara. 252 | * #24: Add UUIDField support. Thanks to Sindri Guðmundsson. 253 | * #14: More compat with redshift: not use SELECT DISTINCT ON.
254 | 255 | Bug Fixes: 256 | 257 | * #15,#21: More compat with redshift: not use CHECK. Thanks to Vasil Vangelovski. 258 | * #18: Fix error on migration with django-1.9 or later that raises AttributeError 259 | of 'sql_create_table_unique'. 260 | * #27: annotate() does not work on Django-1.9 and later. Thanks to Takayuki Hirai. 261 | 262 | 263 | Documentation: 264 | 265 | * Add documentation: https://django-redshift-backend.rtfd.io/ 266 | 267 | 268 | 0.7 (2017-06-08) 269 | ---------------- 270 | 271 | * Drop Python-3.4 272 | * Drop Django-1.7 273 | * Support Python-3.6 274 | * Support Django-1.11 275 | 276 | 0.6 (2016-12-15) 277 | ---------------- 278 | 279 | * Fix crash problem when using bulk insert. 280 | 281 | 0.5 (2016-10-05) 282 | ---------------- 283 | 284 | * Support Django-1.10 285 | * #9: Add support for BigAutoField. Thanks to Maxime Vdb. 286 | * Fix crash problem on sqlmigrate when a field is modified. 287 | 288 | 0.4 (2016-05-17) 289 | ---------------- 290 | 291 | * Support Python-3.4 and 3.5 292 | * #7: Restore support for django-1.7. Version 0.3 doesn't support django-1.7. 293 | * #4: More compat with redshift: not use SET CONSTRAINTS. Thanks to Maxime Vdb. 294 | * #6: More compat with redshift: not use sequence reset query. Thanks to Maxime Vdb. 295 | * #5: Add REDSHIFT_VARCHAR_LENGTH_MULTIPLIER settings. Thanks to Maxime Vdb. 296 | * Support column type changing on migration. 297 | 298 | 0.3 (2016-05-14) 299 | ---------------- 300 | 301 | * #3: More compat with Redshift (AutoField, DateTimeField, Index). Thanks to Maxime Vdb. 302 | * More compat with redshift: add TextField 303 | * More compat with redshift: not use DEFERRABLE, CONSTRAINT, DROP DEFAULT 304 | * More compat with redshift: support modify column 305 | 306 | 307 | 0.2.1 (2016-02-01) 308 | ------------------ 309 | 310 | * "SET TIME_ZONE" warning is changed to a debug log for the 'django.db.backend' logger. 311 | 312 | 0.2 (2016-01-08) 313 | ---------------- 314 | 315 | * Disable "SET TIME_ZONE" SQL execution even if settings.TIME_ZONE is specified. 316 | 317 | 0.1.2 (2015-06-05) 318 | ------------------ 319 | 320 | * Support Django-1.8 321 | 322 | 0.1.1 (2015-03-27) 323 | ------------------ 324 | * Disable "SELECT FOR UPDATE" SQL execution. 325 | 326 | 0.1 (2015-03-24) 327 | ---------------- 328 | * Support Django-1.7 329 | * Support "INSERT INTO" SQL execution without "RETURNING" clause. 330 | 331 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2018 Django Redshift Backend team 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | 203 | -------------------------------------------------------------------------------- /django_redshift_backend/_vendor/django40/db/backends/postgresql/introspection.py: -------------------------------------------------------------------------------- 1 | from django_redshift_backend._vendor.django40.db.backends.base.introspection import ( 2 | BaseDatabaseIntrospection, 3 | FieldInfo, 4 | TableInfo, 5 | ) 6 | from django.db.models import Index 7 | 8 | 9 | class DatabaseIntrospection(BaseDatabaseIntrospection): 10 | # Maps type codes to Django Field types. 11 | data_types_reverse = { 12 | 16: "BooleanField", 13 | 17: "BinaryField", 14 | 20: "BigIntegerField", 15 | 21: "SmallIntegerField", 16 | 23: "IntegerField", 17 | 25: "TextField", 18 | 700: "FloatField", 19 | 701: "FloatField", 20 | 869: "GenericIPAddressField", 21 | 1042: "CharField", # blank-padded 22 | 1043: "CharField", 23 | 1082: "DateField", 24 | 1083: "TimeField", 25 | 1114: "DateTimeField", 26 | 1184: "DateTimeField", 27 | 1186: "DurationField", 28 | 1266: "TimeField", 29 | 1700: "DecimalField", 30 | 2950: "UUIDField", 31 | 3802: "JSONField", 32 | } 33 | # A hook for subclasses. 34 | index_default_access_method = "btree" 35 | 36 | ignored_tables = [] 37 | 38 | def get_field_type(self, data_type, description): 39 | field_type = super().get_field_type(data_type, description) 40 | if description.default and "nextval" in description.default: 41 | if field_type == "IntegerField": 42 | return "AutoField" 43 | elif field_type == "BigIntegerField": 44 | return "BigAutoField" 45 | elif field_type == "SmallIntegerField": 46 | return "SmallAutoField" 47 | return field_type 48 | 49 | def get_table_list(self, cursor): 50 | """Return a list of table and view names in the current database.""" 51 | cursor.execute( 52 | """ 53 | SELECT 54 | c.relname, 55 | CASE 56 | WHEN c.relispartition THEN 'p' 57 | WHEN c.relkind IN ('m', 'v') THEN 'v' 58 | ELSE 't' 59 | END 60 | FROM pg_catalog.pg_class c 61 | LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace 62 | WHERE c.relkind IN ('f', 'm', 'p', 'r', 'v') 63 | AND n.nspname NOT IN ('pg_catalog', 'pg_toast') 64 | AND pg_catalog.pg_table_is_visible(c.oid) 65 | """ 66 | ) 67 | return [ 68 | TableInfo(*row) 69 | for row in cursor.fetchall() 70 | if row[0] not in self.ignored_tables 71 | ] 72 | 73 | def get_table_description(self, cursor, table_name): 74 | """ 75 | Return a description of the table with the DB-API cursor.description 76 | interface. 77 | """ 78 | # Query the pg_catalog tables as cursor.description does not reliably 79 | # return the nullable property and information_schema.columns does not 80 | # contain details of materialized views. 
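# The pg_catalog rows fetched below are merged with cursor.description from the "SELECT * ... LIMIT 1" probe further down, producing FieldInfo tuples that add nullability, default, and collation to the driver's type data.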
81 | cursor.execute( 82 | """ 83 | SELECT 84 | a.attname AS column_name, 85 | NOT (a.attnotnull OR (t.typtype = 'd' AND t.typnotnull)) AS is_nullable, 86 | pg_get_expr(ad.adbin, ad.adrelid) AS column_default, 87 | CASE WHEN collname = 'default' THEN NULL ELSE collname END AS collation 88 | FROM pg_attribute a 89 | LEFT JOIN pg_attrdef ad ON a.attrelid = ad.adrelid AND a.attnum = ad.adnum 90 | LEFT JOIN pg_collation co ON a.attcollation = co.oid 91 | JOIN pg_type t ON a.atttypid = t.oid 92 | JOIN pg_class c ON a.attrelid = c.oid 93 | JOIN pg_namespace n ON c.relnamespace = n.oid 94 | WHERE c.relkind IN ('f', 'm', 'p', 'r', 'v') 95 | AND c.relname = %s 96 | AND n.nspname NOT IN ('pg_catalog', 'pg_toast') 97 | AND pg_catalog.pg_table_is_visible(c.oid) 98 | """, 99 | [table_name], 100 | ) 101 | field_map = {line[0]: line[1:] for line in cursor.fetchall()} 102 | cursor.execute( 103 | "SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name) 104 | ) 105 | return [ 106 | FieldInfo( 107 | line.name, 108 | line.type_code, 109 | line.display_size, 110 | line.internal_size, 111 | line.precision, 112 | line.scale, 113 | *field_map[line.name], 114 | ) 115 | for line in cursor.description 116 | ] 117 | 118 | def get_sequences(self, cursor, table_name, table_fields=()): 119 | cursor.execute( 120 | """ 121 | SELECT s.relname as sequence_name, col.attname 122 | FROM pg_class s 123 | JOIN pg_namespace sn ON sn.oid = s.relnamespace 124 | JOIN 125 | pg_depend d ON d.refobjid = s.oid 126 | AND d.refclassid = 'pg_class'::regclass 127 | JOIN 128 | pg_attrdef ad ON ad.oid = d.objid 129 | AND d.classid = 'pg_attrdef'::regclass 130 | JOIN 131 | pg_attribute col ON col.attrelid = ad.adrelid 132 | AND col.attnum = ad.adnum 133 | JOIN pg_class tbl ON tbl.oid = ad.adrelid 134 | WHERE s.relkind = 'S' 135 | AND d.deptype in ('a', 'n') 136 | AND pg_catalog.pg_table_is_visible(tbl.oid) 137 | AND tbl.relname = %s 138 | """, 139 | [table_name], 140 | ) 141 | return [ 142 | {"name": row[0], "table": table_name, "column": row[1]} 143 | for row in cursor.fetchall() 144 | ] 145 | 146 | def get_relations(self, cursor, table_name): 147 | """ 148 | Return a dictionary of {field_name: (field_name_other_table, other_table)} 149 | representing all relationships to the given table. 150 | """ 151 | return { 152 | row[0]: (row[2], row[1]) for row in self.get_key_columns(cursor, table_name) 153 | } 154 | 155 | def get_key_columns(self, cursor, table_name): 156 | cursor.execute( 157 | """ 158 | SELECT a1.attname, c2.relname, a2.attname 159 | FROM pg_constraint con 160 | LEFT JOIN pg_class c1 ON con.conrelid = c1.oid 161 | LEFT JOIN pg_class c2 ON con.confrelid = c2.oid 162 | LEFT JOIN 163 | pg_attribute a1 ON c1.oid = a1.attrelid AND a1.attnum = con.conkey[1] 164 | LEFT JOIN 165 | pg_attribute a2 ON c2.oid = a2.attrelid AND a2.attnum = con.confkey[1] 166 | WHERE 167 | c1.relname = %s AND 168 | con.contype = 'f' AND 169 | c1.relnamespace = c2.relnamespace AND 170 | pg_catalog.pg_table_is_visible(c1.oid) 171 | """, 172 | [table_name], 173 | ) 174 | return cursor.fetchall() 175 | 176 | def get_constraints(self, cursor, table_name): 177 | """ 178 | Retrieve any constraints or keys (unique, pk, fk, check, index) across 179 | one or more columns. Also retrieve the definition of expression-based 180 | indexes. 181 | """ 182 | constraints = {} 183 | # Loop over the key table, collecting things as constraints. The column 184 | # array must return column names in the same order in which they were 185 | # created. 
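# Illustrative result row for a UNIQUE constraint on an "email" column (hypothetical name): ('myapp_user_email_key', ['email'], 'u', None, None).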
186 | cursor.execute( 187 | """ 188 | SELECT 189 | c.conname, 190 | array( 191 | SELECT attname 192 | FROM unnest(c.conkey) WITH ORDINALITY cols(colid, arridx) 193 | JOIN pg_attribute AS ca ON cols.colid = ca.attnum 194 | WHERE ca.attrelid = c.conrelid 195 | ORDER BY cols.arridx 196 | ), 197 | c.contype, 198 | (SELECT fkc.relname || '.' || fka.attname 199 | FROM pg_attribute AS fka 200 | JOIN pg_class AS fkc ON fka.attrelid = fkc.oid 201 | WHERE fka.attrelid = c.confrelid AND fka.attnum = c.confkey[1]), 202 | cl.reloptions 203 | FROM pg_constraint AS c 204 | JOIN pg_class AS cl ON c.conrelid = cl.oid 205 | WHERE cl.relname = %s AND pg_catalog.pg_table_is_visible(cl.oid) 206 | """, 207 | [table_name], 208 | ) 209 | for constraint, columns, kind, used_cols, options in cursor.fetchall(): 210 | constraints[constraint] = { 211 | "columns": columns, 212 | "primary_key": kind == "p", 213 | "unique": kind in ["p", "u"], 214 | "foreign_key": tuple(used_cols.split(".", 1)) if kind == "f" else None, 215 | "check": kind == "c", 216 | "index": False, 217 | "definition": None, 218 | "options": options, 219 | } 220 | # Now get indexes 221 | cursor.execute( 222 | """ 223 | SELECT 224 | indexname, 225 | array_agg(attname ORDER BY arridx), 226 | indisunique, 227 | indisprimary, 228 | array_agg(ordering ORDER BY arridx), 229 | amname, 230 | exprdef, 231 | s2.attoptions 232 | FROM ( 233 | SELECT 234 | c2.relname as indexname, idx.*, attr.attname, am.amname, 235 | CASE 236 | WHEN idx.indexprs IS NOT NULL THEN 237 | pg_get_indexdef(idx.indexrelid) 238 | END AS exprdef, 239 | CASE am.amname 240 | WHEN %s THEN 241 | CASE (option & 1) 242 | WHEN 1 THEN 'DESC' ELSE 'ASC' 243 | END 244 | END as ordering, 245 | c2.reloptions as attoptions 246 | FROM ( 247 | SELECT * 248 | FROM 249 | pg_index i, 250 | unnest(i.indkey, i.indoption) 251 | WITH ORDINALITY koi(key, option, arridx) 252 | ) idx 253 | LEFT JOIN pg_class c ON idx.indrelid = c.oid 254 | LEFT JOIN pg_class c2 ON idx.indexrelid = c2.oid 255 | LEFT JOIN pg_am am ON c2.relam = am.oid 256 | LEFT JOIN 257 | pg_attribute attr ON attr.attrelid = c.oid AND attr.attnum = idx.key 258 | WHERE c.relname = %s AND pg_catalog.pg_table_is_visible(c.oid) 259 | ) s2 260 | GROUP BY indexname, indisunique, indisprimary, amname, exprdef, attoptions; 261 | """, 262 | [self.index_default_access_method, table_name], 263 | ) 264 | for ( 265 | index, 266 | columns, 267 | unique, 268 | primary, 269 | orders, 270 | type_, 271 | definition, 272 | options, 273 | ) in cursor.fetchall(): 274 | if index not in constraints: 275 | basic_index = ( 276 | type_ == self.index_default_access_method 277 | and 278 | # '_btree' references 279 | # django.contrib.postgres.indexes.BTreeIndex.suffix. 
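# e.g. an index created by db_index=True is reported with type "idx" (Index.suffix) rather than "btree".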
280 | not index.endswith("_btree") 281 | and options is None 282 | ) 283 | constraints[index] = { 284 | "columns": columns if columns != [None] else [], 285 | "orders": orders if orders != [None] else [], 286 | "primary_key": primary, 287 | "unique": unique, 288 | "foreign_key": None, 289 | "check": False, 290 | "index": True, 291 | "type": Index.suffix if basic_index else type_, 292 | "definition": definition, 293 | "options": options, 294 | } 295 | return constraints 296 | -------------------------------------------------------------------------------- /django_redshift_backend/_vendor/django40/db/backends/postgresql/schema.py: -------------------------------------------------------------------------------- 1 | import psycopg2 2 | 3 | from django_redshift_backend._vendor.django40.db.backends.base.schema import BaseDatabaseSchemaEditor 4 | from django_redshift_backend._vendor.django40.db.backends.ddl_references import IndexColumns 5 | from django_redshift_backend._vendor.django40.db.backends.utils import strip_quotes 6 | 7 | 8 | class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): 9 | sql_create_sequence = "CREATE SEQUENCE %(sequence)s" 10 | sql_delete_sequence = "DROP SEQUENCE IF EXISTS %(sequence)s CASCADE" 11 | sql_set_sequence_max = ( 12 | "SELECT setval('%(sequence)s', MAX(%(column)s)) FROM %(table)s" 13 | ) 14 | sql_set_sequence_owner = "ALTER SEQUENCE %(sequence)s OWNED BY %(table)s.%(column)s" 15 | 16 | sql_create_index = ( 17 | "CREATE INDEX %(name)s ON %(table)s%(using)s " 18 | "(%(columns)s)%(include)s%(extra)s%(condition)s" 19 | ) 20 | sql_create_index_concurrently = ( 21 | "CREATE INDEX CONCURRENTLY %(name)s ON %(table)s%(using)s " 22 | "(%(columns)s)%(include)s%(extra)s%(condition)s" 23 | ) 24 | sql_delete_index = "DROP INDEX IF EXISTS %(name)s" 25 | sql_delete_index_concurrently = "DROP INDEX CONCURRENTLY IF EXISTS %(name)s" 26 | 27 | # Setting the constraint to IMMEDIATE to allow changing data in the same 28 | # transaction. 29 | sql_create_column_inline_fk = ( 30 | "CONSTRAINT %(name)s REFERENCES %(to_table)s(%(to_column)s)%(deferrable)s" 31 | "; SET CONSTRAINTS %(namespace)s%(name)s IMMEDIATE" 32 | ) 33 | # Setting the constraint to IMMEDIATE runs any deferred checks to allow 34 | # dropping it in the same transaction. 35 | sql_delete_fk = ( 36 | "SET CONSTRAINTS %(name)s IMMEDIATE; " 37 | "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" 38 | ) 39 | sql_delete_procedure = "DROP FUNCTION %(procedure)s(%(param_types)s)" 40 | 41 | def quote_value(self, value): 42 | if isinstance(value, str): 43 | value = value.replace("%", "%%") 44 | adapted = psycopg2.extensions.adapt(value) 45 | if hasattr(adapted, "encoding"): 46 | adapted.encoding = "utf8" 47 | # getquoted() returns a quoted bytestring of the adapted value. 48 | return adapted.getquoted().decode() 49 | 50 | def _field_indexes_sql(self, model, field): 51 | output = super()._field_indexes_sql(model, field) 52 | like_index_statement = self._create_like_index_sql(model, field) 53 | if like_index_statement is not None: 54 | output.append(like_index_statement) 55 | return output 56 | 57 | def _field_data_type(self, field): 58 | if field.is_relation: 59 | return field.rel_db_type(self.connection) 60 | return self.connection.data_types.get( 61 | field.get_internal_type(), 62 | field.db_type(self.connection), 63 | ) 64 | 65 | def _field_base_data_types(self, field): 66 | # Yield base data types for array fields. 
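# e.g. a nested ArrayField(ArrayField(CharField(...))) recurses to the innermost base field and yields its data type once.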
67 | if field.base_field.get_internal_type() == "ArrayField": 68 | yield from self._field_base_data_types(field.base_field) 69 | else: 70 | yield self._field_data_type(field.base_field) 71 | 72 | def _create_like_index_sql(self, model, field): 73 | """ 74 | Return the statement to create an index with varchar operator pattern 75 | when the column type is 'varchar' or 'text', otherwise return None. 76 | """ 77 | db_type = field.db_type(connection=self.connection) 78 | if db_type is not None and (field.db_index or field.unique): 79 | # Fields with database column types of `varchar` and `text` need 80 | # a second index that specifies their operator class, which is 81 | # needed when performing correct LIKE queries outside the 82 | # C locale. See #12234. 83 | # 84 | # The same doesn't apply to array fields such as varchar[size] 85 | # and text[size], so skip them. 86 | if "[" in db_type: 87 | return None 88 | if db_type.startswith("varchar"): 89 | return self._create_index_sql( 90 | model, 91 | fields=[field], 92 | suffix="_like", 93 | opclasses=["varchar_pattern_ops"], 94 | ) 95 | elif db_type.startswith("text"): 96 | return self._create_index_sql( 97 | model, 98 | fields=[field], 99 | suffix="_like", 100 | opclasses=["text_pattern_ops"], 101 | ) 102 | return None 103 | 104 | def _alter_column_type_sql(self, model, old_field, new_field, new_type): 105 | self.sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s" 106 | # Cast when data type changed. 107 | using_sql = " USING %(column)s::%(type)s" 108 | new_internal_type = new_field.get_internal_type() 109 | old_internal_type = old_field.get_internal_type() 110 | if new_internal_type == "ArrayField" and new_internal_type == old_internal_type: 111 | # Compare base data types for array fields. 112 | if list(self._field_base_data_types(old_field)) != list( 113 | self._field_base_data_types(new_field) 114 | ): 115 | self.sql_alter_column_type += using_sql 116 | elif self._field_data_type(old_field) != self._field_data_type(new_field): 117 | self.sql_alter_column_type += using_sql 118 | # Make ALTER TYPE with SERIAL make sense. 
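# Illustrative flow: changing IntegerField -> AutoField emits 'ALTER COLUMN "id" TYPE integer' plus deferred statements that drop and recreate "<table>_id_seq", point the column default at nextval(), seed it with setval(), and attach it with OWNED BY.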
119 | table = strip_quotes(model._meta.db_table) 120 | serial_fields_map = { 121 | "bigserial": "bigint", 122 | "serial": "integer", 123 | "smallserial": "smallint", 124 | } 125 | if new_type.lower() in serial_fields_map: 126 | column = strip_quotes(new_field.column) 127 | sequence_name = "%s_%s_seq" % (table, column) 128 | return ( 129 | ( 130 | self.sql_alter_column_type 131 | % { 132 | "column": self.quote_name(column), 133 | "type": serial_fields_map[new_type.lower()], 134 | }, 135 | [], 136 | ), 137 | [ 138 | ( 139 | self.sql_delete_sequence 140 | % { 141 | "sequence": self.quote_name(sequence_name), 142 | }, 143 | [], 144 | ), 145 | ( 146 | self.sql_create_sequence 147 | % { 148 | "sequence": self.quote_name(sequence_name), 149 | }, 150 | [], 151 | ), 152 | ( 153 | self.sql_alter_column 154 | % { 155 | "table": self.quote_name(table), 156 | "changes": self.sql_alter_column_default 157 | % { 158 | "column": self.quote_name(column), 159 | "default": "nextval('%s')" 160 | % self.quote_name(sequence_name), 161 | }, 162 | }, 163 | [], 164 | ), 165 | ( 166 | self.sql_set_sequence_max 167 | % { 168 | "table": self.quote_name(table), 169 | "column": self.quote_name(column), 170 | "sequence": self.quote_name(sequence_name), 171 | }, 172 | [], 173 | ), 174 | ( 175 | self.sql_set_sequence_owner 176 | % { 177 | "table": self.quote_name(table), 178 | "column": self.quote_name(column), 179 | "sequence": self.quote_name(sequence_name), 180 | }, 181 | [], 182 | ), 183 | ], 184 | ) 185 | elif ( 186 | old_field.db_parameters(connection=self.connection)["type"] 187 | in serial_fields_map 188 | ): 189 | # Drop the sequence if migrating away from AutoField. 190 | column = strip_quotes(new_field.column) 191 | sequence_name = "%s_%s_seq" % (table, column) 192 | fragment, _ = super()._alter_column_type_sql( 193 | model, old_field, new_field, new_type 194 | ) 195 | return fragment, [ 196 | ( 197 | self.sql_delete_sequence 198 | % { 199 | "sequence": self.quote_name(sequence_name), 200 | }, 201 | [], 202 | ), 203 | ] 204 | else: 205 | return super()._alter_column_type_sql(model, old_field, new_field, new_type) 206 | 207 | def _alter_field( 208 | self, 209 | model, 210 | old_field, 211 | new_field, 212 | old_type, 213 | new_type, 214 | old_db_params, 215 | new_db_params, 216 | strict=False, 217 | ): 218 | # Drop indexes on varchar/text/citext columns that are changing to a 219 | # different type. 220 | if (old_field.db_index or old_field.unique) and ( 221 | (old_type.startswith("varchar") and not new_type.startswith("varchar")) 222 | or (old_type.startswith("text") and not new_type.startswith("text")) 223 | or (old_type.startswith("citext") and not new_type.startswith("citext")) 224 | ): 225 | index_name = self._create_index_name( 226 | model._meta.db_table, [old_field.column], suffix="_like" 227 | ) 228 | self.execute(self._delete_index_sql(model, index_name)) 229 | 230 | super()._alter_field( 231 | model, 232 | old_field, 233 | new_field, 234 | old_type, 235 | new_type, 236 | old_db_params, 237 | new_db_params, 238 | strict, 239 | ) 240 | # Added an index? Create any PostgreSQL-specific indexes. 241 | if (not (old_field.db_index or old_field.unique) and new_field.db_index) or ( 242 | not old_field.unique and new_field.unique 243 | ): 244 | like_index_statement = self._create_like_index_sql(model, new_field) 245 | if like_index_statement is not None: 246 | self.execute(like_index_statement) 247 | 248 | # Removed an index? Drop any PostgreSQL-specific indexes. 
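# e.g. dropping unique=True from a varchar column also removes the companion "*_like" pattern-ops index created by _create_like_index_sql() above.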
249 | if old_field.unique and not (new_field.db_index or new_field.unique): 250 | index_to_remove = self._create_index_name( 251 | model._meta.db_table, [old_field.column], suffix="_like" 252 | ) 253 | self.execute(self._delete_index_sql(model, index_to_remove)) 254 | 255 | def _index_columns(self, table, columns, col_suffixes, opclasses): 256 | if opclasses: 257 | return IndexColumns( 258 | table, 259 | columns, 260 | self.quote_name, 261 | col_suffixes=col_suffixes, 262 | opclasses=opclasses, 263 | ) 264 | return super()._index_columns(table, columns, col_suffixes, opclasses) 265 | 266 | def add_index(self, model, index, concurrently=False): 267 | self.execute( 268 | index.create_sql(model, self, concurrently=concurrently), params=None 269 | ) 270 | 271 | def remove_index(self, model, index, concurrently=False): 272 | self.execute(index.remove_sql(model, self, concurrently=concurrently)) 273 | 274 | def _delete_index_sql(self, model, name, sql=None, concurrently=False): 275 | sql = ( 276 | self.sql_delete_index_concurrently 277 | if concurrently 278 | else self.sql_delete_index 279 | ) 280 | return super()._delete_index_sql(model, name, sql) 281 | 282 | def _create_index_sql( 283 | self, 284 | model, 285 | *, 286 | fields=None, 287 | name=None, 288 | suffix="", 289 | using="", 290 | db_tablespace=None, 291 | col_suffixes=(), 292 | sql=None, 293 | opclasses=(), 294 | condition=None, 295 | concurrently=False, 296 | include=None, 297 | expressions=None, 298 | ): 299 | sql = ( 300 | self.sql_create_index 301 | if not concurrently 302 | else self.sql_create_index_concurrently 303 | ) 304 | return super()._create_index_sql( 305 | model, 306 | fields=fields, 307 | name=name, 308 | suffix=suffix, 309 | using=using, 310 | db_tablespace=db_tablespace, 311 | col_suffixes=col_suffixes, 312 | sql=sql, 313 | opclasses=opclasses, 314 | condition=condition, 315 | include=include, 316 | expressions=expressions, 317 | ) 318 | -------------------------------------------------------------------------------- /django_redshift_backend/_vendor/django40/db/backends/postgresql/operations.py: -------------------------------------------------------------------------------- 1 | from psycopg2.extras import Inet 2 | 3 | from django.conf import settings 4 | from django_redshift_backend._vendor.django40.db.backends.base.operations import BaseDatabaseOperations 5 | from django_redshift_backend._vendor.django40.db.backends.utils import split_tzname_delta 6 | 7 | 8 | class DatabaseOperations(BaseDatabaseOperations): 9 | cast_char_field_without_max_length = "varchar" 10 | explain_prefix = "EXPLAIN" 11 | explain_options = frozenset( 12 | [ 13 | "ANALYZE", 14 | "BUFFERS", 15 | "COSTS", 16 | "SETTINGS", 17 | "SUMMARY", 18 | "TIMING", 19 | "VERBOSE", 20 | "WAL", 21 | ] 22 | ) 23 | cast_data_types = { 24 | "AutoField": "integer", 25 | "BigAutoField": "bigint", 26 | "SmallAutoField": "smallint", 27 | } 28 | 29 | def unification_cast_sql(self, output_field): 30 | internal_type = output_field.get_internal_type() 31 | if internal_type in ( 32 | "GenericIPAddressField", 33 | "IPAddressField", 34 | "TimeField", 35 | "UUIDField", 36 | ): 37 | # PostgreSQL will resolve a union as type 'text' if input types are 38 | # 'unknown'. 39 | # https://www.postgresql.org/docs/current/typeconv-union-case.html 40 | # These fields cannot be implicitly cast back in the default 41 | # PostgreSQL configuration so we need to explicitly cast them. 42 | # We must also remove components of the type within brackets: 43 | # varchar(255) -> varchar. 
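# e.g. a UUIDField in a UNION is emitted as "CAST(%s AS uuid)".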
44 | return ( 45 | "CAST(%%s AS %s)" % output_field.db_type(self.connection).split("(")[0] 46 | ) 47 | return "%s" 48 | 49 | def date_extract_sql(self, lookup_type, field_name): 50 | # https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT 51 | if lookup_type == "week_day": 52 | # For consistency across backends, we return Sunday=1, Saturday=7. 53 | return "EXTRACT('dow' FROM %s) + 1" % field_name 54 | elif lookup_type == "iso_week_day": 55 | return "EXTRACT('isodow' FROM %s)" % field_name 56 | elif lookup_type == "iso_year": 57 | return "EXTRACT('isoyear' FROM %s)" % field_name 58 | else: 59 | return "EXTRACT('%s' FROM %s)" % (lookup_type, field_name) 60 | 61 | def date_trunc_sql(self, lookup_type, field_name, tzname=None): 62 | field_name = self._convert_field_to_tz(field_name, tzname) 63 | # https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC 64 | return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name) 65 | 66 | def _prepare_tzname_delta(self, tzname): 67 | tzname, sign, offset = split_tzname_delta(tzname) 68 | if offset: 69 | sign = "-" if sign == "+" else "+" 70 | return f"{tzname}{sign}{offset}" 71 | return tzname 72 | 73 | def _convert_field_to_tz(self, field_name, tzname): 74 | if tzname and settings.USE_TZ: 75 | field_name = "%s AT TIME ZONE '%s'" % ( 76 | field_name, 77 | self._prepare_tzname_delta(tzname), 78 | ) 79 | return field_name 80 | 81 | def datetime_cast_date_sql(self, field_name, tzname): 82 | field_name = self._convert_field_to_tz(field_name, tzname) 83 | return "(%s)::date" % field_name 84 | 85 | def datetime_cast_time_sql(self, field_name, tzname): 86 | field_name = self._convert_field_to_tz(field_name, tzname) 87 | return "(%s)::time" % field_name 88 | 89 | def datetime_extract_sql(self, lookup_type, field_name, tzname): 90 | field_name = self._convert_field_to_tz(field_name, tzname) 91 | return self.date_extract_sql(lookup_type, field_name) 92 | 93 | def datetime_trunc_sql(self, lookup_type, field_name, tzname): 94 | field_name = self._convert_field_to_tz(field_name, tzname) 95 | # https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC 96 | return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name) 97 | 98 | def time_trunc_sql(self, lookup_type, field_name, tzname=None): 99 | field_name = self._convert_field_to_tz(field_name, tzname) 100 | return "DATE_TRUNC('%s', %s)::time" % (lookup_type, field_name) 101 | 102 | def deferrable_sql(self): 103 | return " DEFERRABLE INITIALLY DEFERRED" 104 | 105 | def fetch_returned_insert_rows(self, cursor): 106 | """ 107 | Given a cursor object that has just performed an INSERT...RETURNING 108 | statement into a table, return the tuple of returned data. 109 | """ 110 | return cursor.fetchall() 111 | 112 | def lookup_cast(self, lookup_type, internal_type=None): 113 | lookup = "%s" 114 | 115 | # Cast text lookups to text to allow things like filter(x__contains=4) 116 | if lookup_type in ( 117 | "iexact", 118 | "contains", 119 | "icontains", 120 | "startswith", 121 | "istartswith", 122 | "endswith", 123 | "iendswith", 124 | "regex", 125 | "iregex", 126 | ): 127 | if internal_type in ("IPAddressField", "GenericIPAddressField"): 128 | lookup = "HOST(%s)" 129 | elif internal_type in ("CICharField", "CIEmailField", "CITextField"): 130 | lookup = "%s::citext" 131 | else: 132 | lookup = "%s::text" 133 | 134 | # Use UPPER(x) for case-insensitive lookups; it's faster. 
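# For instance, lookup_cast("icontains", internal_type="CharField")
# returns "UPPER(%s::text)", while the case-sensitive "contains" is
# left as "%s::text".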
135 | if lookup_type in ("iexact", "icontains", "istartswith", "iendswith"): 136 | lookup = "UPPER(%s)" % lookup 137 | 138 | return lookup 139 | 140 | def no_limit_value(self): 141 | return None 142 | 143 | def prepare_sql_script(self, sql): 144 | return [sql] 145 | 146 | def quote_name(self, name): 147 | if name.startswith('"') and name.endswith('"'): 148 | return name # Quoting once is enough. 149 | return '"%s"' % name 150 | 151 | def set_time_zone_sql(self): 152 | return "SET TIME ZONE %s" 153 | 154 | def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False): 155 | if not tables: 156 | return [] 157 | 158 | # Perform a single SQL 'TRUNCATE x, y, z...;' statement. It allows us 159 | # to truncate tables referenced by a foreign key in any other table. 160 | sql_parts = [ 161 | style.SQL_KEYWORD("TRUNCATE"), 162 | ", ".join(style.SQL_FIELD(self.quote_name(table)) for table in tables), 163 | ] 164 | if reset_sequences: 165 | sql_parts.append(style.SQL_KEYWORD("RESTART IDENTITY")) 166 | if allow_cascade: 167 | sql_parts.append(style.SQL_KEYWORD("CASCADE")) 168 | return ["%s;" % " ".join(sql_parts)] 169 | 170 | def sequence_reset_by_name_sql(self, style, sequences): 171 | # 'ALTER SEQUENCE sequence_name RESTART WITH 1;'... style SQL statements 172 | # to reset sequence indices 173 | sql = [] 174 | for sequence_info in sequences: 175 | table_name = sequence_info["table"] 176 | # 'id' will be the case if it's an m2m using an autogenerated 177 | # intermediate table (see BaseDatabaseIntrospection.sequence_list). 178 | column_name = sequence_info["column"] or "id" 179 | sql.append( 180 | "%s setval(pg_get_serial_sequence('%s','%s'), 1, false);" 181 | % ( 182 | style.SQL_KEYWORD("SELECT"), 183 | style.SQL_TABLE(self.quote_name(table_name)), 184 | style.SQL_FIELD(column_name), 185 | ) 186 | ) 187 | return sql 188 | 189 | def tablespace_sql(self, tablespace, inline=False): 190 | if inline: 191 | return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace) 192 | else: 193 | return "TABLESPACE %s" % self.quote_name(tablespace) 194 | 195 | def sequence_reset_sql(self, style, model_list): 196 | from django.db import models 197 | 198 | output = [] 199 | qn = self.quote_name 200 | for model in model_list: 201 | # Use `coalesce` to set the sequence for each model to the max pk 202 | # value if there are records, or 1 if there are none. Set the 203 | # `is_called` property (the third argument to `setval`) to true if 204 | # there are records (as the max pk value is already in use), 205 | # otherwise set it to false. Use pg_get_serial_sequence to get the 206 | # underlying sequence name from the table name and column name. 207 | 208 | for f in model._meta.local_fields: 209 | if isinstance(f, models.AutoField): 210 | output.append( 211 | "%s setval(pg_get_serial_sequence('%s','%s'), " 212 | "coalesce(max(%s), 1), max(%s) %s null) %s %s;" 213 | % ( 214 | style.SQL_KEYWORD("SELECT"), 215 | style.SQL_TABLE(qn(model._meta.db_table)), 216 | style.SQL_FIELD(f.column), 217 | style.SQL_FIELD(qn(f.column)), 218 | style.SQL_FIELD(qn(f.column)), 219 | style.SQL_KEYWORD("IS NOT"), 220 | style.SQL_KEYWORD("FROM"), 221 | style.SQL_TABLE(qn(model._meta.db_table)), 222 | ) 223 | ) 224 | # Only one AutoField is allowed per model, so don't bother 225 | # continuing. 226 | break 227 | return output 228 | 229 | def prep_for_iexact_query(self, x): 230 | return x 231 | 232 | def max_name_length(self): 233 | """ 234 | Return the maximum length of an identifier. 
235 | 236 | The maximum length of an identifier is 63 by default, but can be 237 | changed by recompiling PostgreSQL after editing the NAMEDATALEN 238 | macro in src/include/pg_config_manual.h. 239 | 240 | This implementation returns 63, but can be overridden by a custom 241 | database backend that inherits most of its behavior from this one. 242 | """ 243 | return 63 244 | 245 | def distinct_sql(self, fields, params): 246 | if fields: 247 | params = [param for param_list in params for param in param_list] 248 | return (["DISTINCT ON (%s)" % ", ".join(fields)], params) 249 | else: 250 | return ["DISTINCT"], [] 251 | 252 | def last_executed_query(self, cursor, sql, params): 253 | # https://www.psycopg.org/docs/cursor.html#cursor.query 254 | # The query attribute is a Psycopg extension to the DB API 2.0. 255 | if cursor.query is not None: 256 | return cursor.query.decode() 257 | return None 258 | 259 | def return_insert_columns(self, fields): 260 | if not fields: 261 | return "", () 262 | columns = [ 263 | "%s.%s" 264 | % ( 265 | self.quote_name(field.model._meta.db_table), 266 | self.quote_name(field.column), 267 | ) 268 | for field in fields 269 | ] 270 | return "RETURNING %s" % ", ".join(columns), () 271 | 272 | def bulk_insert_sql(self, fields, placeholder_rows): 273 | placeholder_rows_sql = (", ".join(row) for row in placeholder_rows) 274 | values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql) 275 | return "VALUES " + values_sql 276 | 277 | def adapt_datefield_value(self, value): 278 | return value 279 | 280 | def adapt_datetimefield_value(self, value): 281 | return value 282 | 283 | def adapt_timefield_value(self, value): 284 | return value 285 | 286 | def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None): 287 | return value 288 | 289 | def adapt_ipaddressfield_value(self, value): 290 | if value: 291 | return Inet(value) 292 | return None 293 | 294 | def subtract_temporals(self, internal_type, lhs, rhs): 295 | if internal_type == "DateField": 296 | lhs_sql, lhs_params = lhs 297 | rhs_sql, rhs_params = rhs 298 | params = (*lhs_params, *rhs_params) 299 | return "(interval '1 day' * (%s - %s))" % (lhs_sql, rhs_sql), params 300 | return super().subtract_temporals(internal_type, lhs, rhs) 301 | 302 | def explain_query_prefix(self, format=None, **options): 303 | extra = {} 304 | # Normalize options. 
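# For example, analyze=True, verbose=False normalize to
# {"ANALYZE": "true", "VERBOSE": "false"} and render as an
# "EXPLAIN (ANALYZE true, VERBOSE false)"-style prefix; anything not in
# explain_options is passed through to the base implementation, which
# rejects unknown options.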
305 | if options: 306 | options = { 307 | name.upper(): "true" if value else "false" 308 | for name, value in options.items() 309 | } 310 | for valid_option in self.explain_options: 311 | value = options.pop(valid_option, None) 312 | if value is not None: 313 | extra[valid_option.upper()] = value 314 | prefix = super().explain_query_prefix(format, **options) 315 | if format: 316 | extra["FORMAT"] = format 317 | if extra: 318 | prefix += " (%s)" % ", ".join("%s %s" % i for i in extra.items()) 319 | return prefix 320 | 321 | def ignore_conflicts_suffix_sql(self, ignore_conflicts=None): 322 | return ( 323 | "ON CONFLICT DO NOTHING" 324 | if ignore_conflicts 325 | else super().ignore_conflicts_suffix_sql(ignore_conflicts) 326 | ) 327 | -------------------------------------------------------------------------------- /tests/test_base.py: -------------------------------------------------------------------------------- 1 | # copy AS-IS from django repository at stable/3.2.x 2 | # https://github.com/django/django/blob/754af45/tests/migrations/test_base.py 3 | import os 4 | import shutil 5 | import tempfile 6 | from contextlib import contextmanager 7 | from importlib import import_module 8 | 9 | from django.apps import apps 10 | from django.db import connection, connections, migrations, models 11 | from django.db.migrations.migration import Migration 12 | from django.db.migrations.recorder import MigrationRecorder 13 | from django.db.migrations.state import ProjectState 14 | from django.test import TransactionTestCase 15 | from django.test.utils import extend_sys_path 16 | from django.utils.module_loading import module_dir 17 | 18 | 19 | class MigrationTestBase(TransactionTestCase): 20 | """ 21 | Contains an extended set of asserts for testing migrations and schema operations. 22 | """ 23 | 24 | available_apps = ["migrations"] 25 | databases = {'default', 'other'} 26 | 27 | def tearDown(self): 28 | # Reset applied-migrations state. 
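# MigrationRecorder stores applied migrations in the django_migrations
# table; clearing the rows recorded for the 'migrations' app on every
# configured alias keeps test runs independent of one another.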
29 | for db in self.databases: 30 | recorder = MigrationRecorder(connections[db]) 31 | recorder.migration_qs.filter(app='migrations').delete() 32 | 33 | def get_table_description(self, table, using='default'): 34 | with connections[using].cursor() as cursor: 35 | return connections[using].introspection.get_table_description(cursor, table) 36 | 37 | def assertTableExists(self, table, using='default'): 38 | with connections[using].cursor() as cursor: 39 | self.assertIn(table, connections[using].introspection.table_names(cursor)) 40 | 41 | def assertTableNotExists(self, table, using='default'): 42 | with connections[using].cursor() as cursor: 43 | self.assertNotIn(table, connections[using].introspection.table_names(cursor)) 44 | 45 | def assertColumnExists(self, table, column, using='default'): 46 | self.assertIn(column, [c.name for c in self.get_table_description(table, using=using)]) 47 | 48 | def assertColumnNotExists(self, table, column, using='default'): 49 | self.assertNotIn(column, [c.name for c in self.get_table_description(table, using=using)]) 50 | 51 | def _get_column_allows_null(self, table, column, using): 52 | return [c.null_ok for c in self.get_table_description(table, using=using) if c.name == column][0] 53 | 54 | def assertColumnNull(self, table, column, using='default'): 55 | self.assertTrue(self._get_column_allows_null(table, column, using)) 56 | 57 | def assertColumnNotNull(self, table, column, using='default'): 58 | self.assertFalse(self._get_column_allows_null(table, column, using)) 59 | 60 | def assertIndexExists(self, table, columns, value=True, using='default', index_type=None): 61 | with connections[using].cursor() as cursor: 62 | self.assertEqual( 63 | value, 64 | any( 65 | c["index"] 66 | for c in connections[using].introspection.get_constraints(cursor, table).values() 67 | if ( 68 | c['columns'] == list(columns) and 69 | (index_type is None or c['type'] == index_type) and 70 | not c['unique'] 71 | ) 72 | ), 73 | ) 74 | 75 | def assertIndexNotExists(self, table, columns): 76 | return self.assertIndexExists(table, columns, False) 77 | 78 | def assertIndexNameExists(self, table, index, using='default'): 79 | with connections[using].cursor() as cursor: 80 | self.assertIn( 81 | index, 82 | connection.introspection.get_constraints(cursor, table), 83 | ) 84 | 85 | def assertIndexNameNotExists(self, table, index, using='default'): 86 | with connections[using].cursor() as cursor: 87 | self.assertNotIn( 88 | index, 89 | connection.introspection.get_constraints(cursor, table), 90 | ) 91 | 92 | def assertConstraintExists(self, table, name, value=True, using='default'): 93 | with connections[using].cursor() as cursor: 94 | constraints = connections[using].introspection.get_constraints(cursor, table).items() 95 | self.assertEqual( 96 | value, 97 | any(c['check'] for n, c in constraints if n == name), 98 | ) 99 | 100 | def assertConstraintNotExists(self, table, name): 101 | return self.assertConstraintExists(table, name, False) 102 | 103 | def assertUniqueConstraintExists(self, table, columns, value=True, using='default'): 104 | with connections[using].cursor() as cursor: 105 | constraints = connections[using].introspection.get_constraints(cursor, table).values() 106 | self.assertEqual( 107 | value, 108 | any(c['unique'] for c in constraints if c['columns'] == list(columns)), 109 | ) 110 | 111 | def assertFKExists(self, table, columns, to, value=True, using='default'): 112 | with connections[using].cursor() as cursor: 113 | self.assertEqual( 114 | value, 115 | any( 116 | 
c["foreign_key"] == to 117 | for c in connections[using].introspection.get_constraints(cursor, table).values() 118 | if c['columns'] == list(columns) 119 | ), 120 | ) 121 | 122 | def assertFKNotExists(self, table, columns, to): 123 | return self.assertFKExists(table, columns, to, False) 124 | 125 | @contextmanager 126 | def temporary_migration_module(self, app_label='migrations', module=None): 127 | """ 128 | Allows testing management commands in a temporary migrations module. 129 | 130 | Wrap all invocations to makemigrations and squashmigrations with this 131 | context manager in order to avoid creating migration files in your 132 | source tree inadvertently. 133 | 134 | Takes the application label that will be passed to makemigrations or 135 | squashmigrations and the Python path to a migrations module. 136 | 137 | The migrations module is used as a template for creating the temporary 138 | migrations module. If it isn't provided, the application's migrations 139 | module is used, if it exists. 140 | 141 | Returns the filesystem path to the temporary migrations module. 142 | """ 143 | with tempfile.TemporaryDirectory() as temp_dir: 144 | target_dir = tempfile.mkdtemp(dir=temp_dir) 145 | with open(os.path.join(target_dir, '__init__.py'), 'w'): 146 | pass 147 | target_migrations_dir = os.path.join(target_dir, 'migrations') 148 | 149 | if module is None: 150 | module = apps.get_app_config(app_label).name + '.migrations' 151 | 152 | try: 153 | source_migrations_dir = module_dir(import_module(module)) 154 | except (ImportError, ValueError): 155 | pass 156 | else: 157 | shutil.copytree(source_migrations_dir, target_migrations_dir) 158 | 159 | with extend_sys_path(temp_dir): 160 | new_module = os.path.basename(target_dir) + '.migrations' 161 | with self.settings(MIGRATION_MODULES={app_label: new_module}): 162 | yield target_migrations_dir 163 | 164 | 165 | class OperationTestBase(MigrationTestBase): 166 | """Common functions to help test operations.""" 167 | 168 | @classmethod 169 | def setUpClass(cls): 170 | super().setUpClass() 171 | cls._initial_table_names = frozenset(connection.introspection.table_names()) 172 | 173 | def tearDown(self): 174 | self.cleanup_test_tables() 175 | super().tearDown() 176 | 177 | def cleanup_test_tables(self): 178 | table_names = frozenset(connection.introspection.table_names()) - self._initial_table_names 179 | with connection.schema_editor() as editor: 180 | with connection.constraint_checks_disabled(): 181 | for table_name in table_names: 182 | editor.execute(editor.sql_delete_table % { 183 | 'table': editor.quote_name(table_name), 184 | }) 185 | 186 | def apply_operations(self, app_label, project_state, operations, atomic=True): 187 | migration = Migration('name', app_label) 188 | migration.operations = operations 189 | with connection.schema_editor(atomic=atomic) as editor: 190 | return migration.apply(project_state, editor) 191 | 192 | def unapply_operations(self, app_label, project_state, operations, atomic=True): 193 | migration = Migration('name', app_label) 194 | migration.operations = operations 195 | with connection.schema_editor(atomic=atomic) as editor: 196 | return migration.unapply(project_state, editor) 197 | 198 | def make_test_state(self, app_label, operation, **kwargs): 199 | """ 200 | Makes a test state using set_up_test_model and returns the 201 | original state and the state after the migration is applied. 
202 | """ 203 | project_state = self.set_up_test_model(app_label, **kwargs) 204 | new_state = project_state.clone() 205 | operation.state_forwards(app_label, new_state) 206 | return project_state, new_state 207 | 208 | def set_up_test_model( 209 | self, app_label, second_model=False, third_model=False, index=False, 210 | multicol_index=False, related_model=False, mti_model=False, 211 | proxy_model=False, manager_model=False, unique_together=False, 212 | options=False, db_table=None, index_together=False, constraints=None, 213 | indexes=None, 214 | ): 215 | """Creates a test model state and database table.""" 216 | # Make the "current" state. 217 | model_options = { 218 | 'swappable': 'TEST_SWAP_MODEL', 219 | 'unique_together': [['pink', 'weight']] if unique_together else [], 220 | } 221 | if options: 222 | model_options['permissions'] = [('can_groom', 'Can groom')] 223 | if db_table: 224 | model_options['db_table'] = db_table 225 | operations = [migrations.CreateModel( 226 | 'Pony', 227 | [ 228 | ('id', models.AutoField(primary_key=True)), 229 | ('pink', models.IntegerField(default=3)), 230 | ('weight', models.FloatField()), 231 | ], 232 | options=model_options, 233 | )] 234 | if index: 235 | operations.append(migrations.AddIndex( 236 | 'Pony', 237 | models.Index(fields=['pink'], name='pony_pink_idx'), 238 | )) 239 | if multicol_index: 240 | operations.append(migrations.AddIndex( 241 | 'Pony', 242 | models.Index(fields=['pink', 'weight'], name='pony_test_idx'), 243 | )) 244 | if indexes: 245 | for index in indexes: 246 | operations.append(migrations.AddIndex('Pony', index)) 247 | if constraints: 248 | for constraint in constraints: 249 | operations.append(migrations.AddConstraint('Pony', constraint)) 250 | if second_model: 251 | operations.append(migrations.CreateModel( 252 | 'Stable', 253 | [ 254 | ('id', models.AutoField(primary_key=True)), 255 | ] 256 | )) 257 | if third_model: 258 | operations.append(migrations.CreateModel( 259 | 'Van', 260 | [ 261 | ('id', models.AutoField(primary_key=True)), 262 | ] 263 | )) 264 | if related_model: 265 | operations.append(migrations.CreateModel( 266 | 'Rider', 267 | [ 268 | ('id', models.AutoField(primary_key=True)), 269 | ('pony', models.ForeignKey('Pony', models.CASCADE)), 270 | ('friend', models.ForeignKey('self', models.CASCADE, null=True)) 271 | ], 272 | )) 273 | if mti_model: 274 | operations.append(migrations.CreateModel( 275 | 'ShetlandPony', 276 | fields=[ 277 | ('pony_ptr', models.OneToOneField( 278 | 'Pony', 279 | models.CASCADE, 280 | auto_created=True, 281 | parent_link=True, 282 | primary_key=True, 283 | to_field='id', 284 | serialize=False, 285 | )), 286 | ('cuteness', models.IntegerField(default=1)), 287 | ], 288 | bases=['%s.Pony' % app_label], 289 | )) 290 | if proxy_model: 291 | operations.append(migrations.CreateModel( 292 | 'ProxyPony', 293 | fields=[], 294 | options={'proxy': True}, 295 | bases=['%s.Pony' % app_label], 296 | )) 297 | if manager_model: 298 | from .models import FoodManager, FoodQuerySet 299 | operations.append(migrations.CreateModel( 300 | 'Food', 301 | fields=[ 302 | ('id', models.AutoField(primary_key=True)), 303 | ], 304 | managers=[ 305 | ('food_qs', FoodQuerySet.as_manager()), 306 | ('food_mgr', FoodManager('a', 'b')), 307 | ('food_mgr_kwargs', FoodManager('x', 'y', 3, 4)), 308 | ] 309 | )) 310 | return self.apply_operations(app_label, ProjectState(), operations) 311 | -------------------------------------------------------------------------------- 
/django_redshift_backend/_vendor/django40/db/backends/postgresql/base.py: -------------------------------------------------------------------------------- 1 | """ 2 | PostgreSQL database backend for Django. 3 | 4 | Requires psycopg 2: https://www.psycopg.org/ 5 | """ 6 | 7 | import asyncio 8 | import threading 9 | import warnings 10 | from contextlib import contextmanager 11 | 12 | from django.conf import settings 13 | from django.core.exceptions import ImproperlyConfigured 14 | from django.db import DatabaseError as WrappedDatabaseError 15 | from django.db import connections 16 | from django.db.backends.base.base import BaseDatabaseWrapper 17 | from django_redshift_backend._vendor.django40.db.backends.utils import CursorDebugWrapper as BaseCursorDebugWrapper 18 | from django.utils.asyncio import async_unsafe 19 | from django.utils.functional import cached_property 20 | from django.utils.safestring import SafeString 21 | from django.utils.version import get_version_tuple 22 | 23 | try: 24 | import psycopg2 as Database 25 | import psycopg2.extensions 26 | import psycopg2.extras 27 | except ImportError as e: 28 | raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e) 29 | 30 | 31 | def psycopg2_version(): 32 | version = psycopg2.__version__.split(" ", 1)[0] 33 | return get_version_tuple(version) 34 | 35 | 36 | PSYCOPG2_VERSION = psycopg2_version() 37 | 38 | if PSYCOPG2_VERSION < (2, 8, 4): 39 | raise ImproperlyConfigured( 40 | "psycopg2 version 2.8.4 or newer is required; you have %s" 41 | % psycopg2.__version__ 42 | ) 43 | 44 | 45 | # Some of these import psycopg2, so import them after checking if it's installed. 46 | from .client import DatabaseClient  # NOQA 47 | from .creation import DatabaseCreation  # NOQA 48 | from .features import DatabaseFeatures  # NOQA 49 | from .introspection import DatabaseIntrospection  # NOQA 50 | from .operations import DatabaseOperations  # NOQA 51 | from .schema import DatabaseSchemaEditor  # NOQA 52 | 53 | psycopg2.extensions.register_adapter(SafeString, psycopg2.extensions.QuotedString) 54 | psycopg2.extras.register_uuid() 55 | 56 | # Register support for inet[] manually so we don't have to handle the Inet() 57 | # object on load all the time. 58 | INETARRAY_OID = 1041 59 | INETARRAY = psycopg2.extensions.new_array_type( 60 | (INETARRAY_OID,), 61 | "INETARRAY", 62 | psycopg2.extensions.UNICODE, 63 | ) 64 | psycopg2.extensions.register_type(INETARRAY) 65 | 66 | 67 | class DatabaseWrapper(BaseDatabaseWrapper): 68 | vendor = "postgresql" 69 | display_name = "PostgreSQL" 70 | # This dictionary maps Field objects to their associated PostgreSQL column 71 | # types, as strings. Column-type strings can contain format strings; they'll 72 | # be interpolated against the values of Field.__dict__ before being output. 73 | # If a column type is set to None, it won't be included in the output.
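# The format strings interpolate against Field.__dict__, e.g.
# CharField(max_length=255) renders "varchar(%(max_length)s)" as
# varchar(255).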
74 | data_types = { 75 | "AutoField": "serial", 76 | "BigAutoField": "bigserial", 77 | "BinaryField": "bytea", 78 | "BooleanField": "boolean", 79 | "CharField": "varchar(%(max_length)s)", 80 | "DateField": "date", 81 | "DateTimeField": "timestamp with time zone", 82 | "DecimalField": "numeric(%(max_digits)s, %(decimal_places)s)", 83 | "DurationField": "interval", 84 | "FileField": "varchar(%(max_length)s)", 85 | "FilePathField": "varchar(%(max_length)s)", 86 | "FloatField": "double precision", 87 | "IntegerField": "integer", 88 | "BigIntegerField": "bigint", 89 | "IPAddressField": "inet", 90 | "GenericIPAddressField": "inet", 91 | "JSONField": "jsonb", 92 | "OneToOneField": "integer", 93 | "PositiveBigIntegerField": "bigint", 94 | "PositiveIntegerField": "integer", 95 | "PositiveSmallIntegerField": "smallint", 96 | "SlugField": "varchar(%(max_length)s)", 97 | "SmallAutoField": "smallserial", 98 | "SmallIntegerField": "smallint", 99 | "TextField": "text", 100 | "TimeField": "time", 101 | "UUIDField": "uuid", 102 | } 103 | data_type_check_constraints = { 104 | "PositiveBigIntegerField": '"%(column)s" >= 0', 105 | "PositiveIntegerField": '"%(column)s" >= 0', 106 | "PositiveSmallIntegerField": '"%(column)s" >= 0', 107 | } 108 | operators = { 109 | "exact": "= %s", 110 | "iexact": "= UPPER(%s)", 111 | "contains": "LIKE %s", 112 | "icontains": "LIKE UPPER(%s)", 113 | "regex": "~ %s", 114 | "iregex": "~* %s", 115 | "gt": "> %s", 116 | "gte": ">= %s", 117 | "lt": "< %s", 118 | "lte": "<= %s", 119 | "startswith": "LIKE %s", 120 | "endswith": "LIKE %s", 121 | "istartswith": "LIKE UPPER(%s)", 122 | "iendswith": "LIKE UPPER(%s)", 123 | } 124 | 125 | # The patterns below are used to generate SQL pattern lookup clauses when 126 | # the right-hand side of the lookup isn't a raw string (it might be an expression 127 | # or the result of a bilateral transformation). 128 | # In those cases, special characters for LIKE operators (e.g. \, *, _) should be 129 | # escaped on database side. 130 | # 131 | # Note: we use str.format() here for readability as '%' is used as a wildcard for 132 | # the LIKE operator. 133 | pattern_esc = ( 134 | r"REPLACE(REPLACE(REPLACE({}, E'\\', E'\\\\'), E'%%', E'\\%%'), E'_', E'\\_')" 135 | ) 136 | pattern_ops = { 137 | "contains": "LIKE '%%' || {} || '%%'", 138 | "icontains": "LIKE '%%' || UPPER({}) || '%%'", 139 | "startswith": "LIKE {} || '%%'", 140 | "istartswith": "LIKE UPPER({}) || '%%'", 141 | "endswith": "LIKE '%%' || {}", 142 | "iendswith": "LIKE '%%' || UPPER({})", 143 | } 144 | 145 | Database = Database 146 | SchemaEditorClass = DatabaseSchemaEditor 147 | # Classes instantiated in __init__(). 148 | client_class = DatabaseClient 149 | creation_class = DatabaseCreation 150 | features_class = DatabaseFeatures 151 | introspection_class = DatabaseIntrospection 152 | ops_class = DatabaseOperations 153 | # PostgreSQL backend-specific attributes. 154 | _named_cursor_idx = 0 155 | 156 | def get_connection_params(self): 157 | settings_dict = self.settings_dict 158 | # None may be used to connect to the default 'postgres' db 159 | if settings_dict["NAME"] == "" and not settings_dict.get("OPTIONS", {}).get( 160 | "service" 161 | ): 162 | raise ImproperlyConfigured( 163 | "settings.DATABASES is improperly configured. " 164 | "Please supply the NAME or OPTIONS['service'] value." 
165 | ) 166 | if len(settings_dict["NAME"] or "") > self.ops.max_name_length(): 167 | raise ImproperlyConfigured( 168 | "The database name '%s' (%d characters) is longer than " 169 | "PostgreSQL's limit of %d characters. Supply a shorter NAME " 170 | "in settings.DATABASES." 171 | % ( 172 | settings_dict["NAME"], 173 | len(settings_dict["NAME"]), 174 | self.ops.max_name_length(), 175 | ) 176 | ) 177 | conn_params = {} 178 | if settings_dict["NAME"]: 179 | conn_params = { 180 | "database": settings_dict["NAME"], 181 | **settings_dict["OPTIONS"], 182 | } 183 | elif settings_dict["NAME"] is None: 184 | # Connect to the default 'postgres' db. 185 | settings_dict.get("OPTIONS", {}).pop("service", None) 186 | conn_params = {"database": "postgres", **settings_dict["OPTIONS"]} 187 | else: 188 | conn_params = {**settings_dict["OPTIONS"]} 189 | 190 | conn_params.pop("isolation_level", None) 191 | if settings_dict["USER"]: 192 | conn_params["user"] = settings_dict["USER"] 193 | if settings_dict["PASSWORD"]: 194 | conn_params["password"] = settings_dict["PASSWORD"] 195 | if settings_dict["HOST"]: 196 | conn_params["host"] = settings_dict["HOST"] 197 | if settings_dict["PORT"]: 198 | conn_params["port"] = settings_dict["PORT"] 199 | return conn_params 200 | 201 | @async_unsafe 202 | def get_new_connection(self, conn_params): 203 | connection = Database.connect(**conn_params) 204 | 205 | # self.isolation_level must be set: 206 | # - after connecting to the database in order to obtain the database's 207 | # default when no value is explicitly specified in options. 208 | # - before calling _set_autocommit() because if autocommit is on, that 209 | # will set connection.isolation_level to ISOLATION_LEVEL_AUTOCOMMIT. 210 | options = self.settings_dict["OPTIONS"] 211 | try: 212 | self.isolation_level = options["isolation_level"] 213 | except KeyError: 214 | self.isolation_level = connection.isolation_level 215 | else: 216 | # Set the isolation level to the value from OPTIONS. 217 | if self.isolation_level != connection.isolation_level: 218 | connection.set_session(isolation_level=self.isolation_level) 219 | # Register dummy loads() to avoid a round trip from psycopg2's decode 220 | # to json.dumps() to json.loads(), when using a custom decoder in 221 | # JSONField. 222 | psycopg2.extras.register_default_jsonb( 223 | conn_or_curs=connection, loads=lambda x: x 224 | ) 225 | return connection 226 | 227 | def ensure_timezone(self): 228 | if self.connection is None: 229 | return False 230 | conn_timezone_name = self.connection.get_parameter_status("TimeZone") 231 | timezone_name = self.timezone_name 232 | if timezone_name and conn_timezone_name != timezone_name: 233 | with self.connection.cursor() as cursor: 234 | cursor.execute(self.ops.set_time_zone_sql(), [timezone_name]) 235 | return True 236 | return False 237 | 238 | def init_connection_state(self): 239 | self.connection.set_client_encoding("UTF8") 240 | 241 | timezone_changed = self.ensure_timezone() 242 | if timezone_changed: 243 | # Commit after setting the time zone (see #17062) 244 | if not self.get_autocommit(): 245 | self.connection.commit() 246 | 247 | @async_unsafe 248 | def create_cursor(self, name=None): 249 | if name: 250 | # In autocommit mode, the cursor will be used outside of a 251 | # transaction, hence use a holdable cursor. 
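# (psycopg2's withhold=True declares the cursor WITH HOLD, so it stays
# readable outside the transaction that created it; it is enabled here
# only when the connection is in autocommit mode.)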
252 | cursor = self.connection.cursor( 253 | name, scrollable=False, withhold=self.connection.autocommit 254 | ) 255 | else: 256 | cursor = self.connection.cursor() 257 | cursor.tzinfo_factory = self.tzinfo_factory if settings.USE_TZ else None 258 | return cursor 259 | 260 | def tzinfo_factory(self, offset): 261 | return self.timezone 262 | 263 | @async_unsafe 264 | def chunked_cursor(self): 265 | self._named_cursor_idx += 1 266 | # Get the current async task 267 | # Note that right now this is behind @async_unsafe, so this is 268 | # unreachable, but in future we'll start loosening this restriction. 269 | # For now, it's here so that every use of "threading" is 270 | # also async-compatible. 271 | try: 272 | current_task = asyncio.current_task() 273 | except RuntimeError: 274 | current_task = None 275 | # Current task can be none even if the current_task call didn't error 276 | if current_task: 277 | task_ident = str(id(current_task)) 278 | else: 279 | task_ident = "sync" 280 | # Use that and the thread ident to get a unique name 281 | return self._cursor( 282 | name="_django_curs_%d_%s_%d" 283 | % ( 284 | # Avoid reusing name in other threads / tasks 285 | threading.current_thread().ident, 286 | task_ident, 287 | self._named_cursor_idx, 288 | ) 289 | ) 290 | 291 | def _set_autocommit(self, autocommit): 292 | with self.wrap_database_errors: 293 | self.connection.autocommit = autocommit 294 | 295 | def check_constraints(self, table_names=None): 296 | """ 297 | Check constraints by setting them to immediate. Return them to deferred 298 | afterward. 299 | """ 300 | with self.cursor() as cursor: 301 | cursor.execute("SET CONSTRAINTS ALL IMMEDIATE") 302 | cursor.execute("SET CONSTRAINTS ALL DEFERRED") 303 | 304 | def is_usable(self): 305 | try: 306 | # Use a psycopg cursor directly, bypassing Django's utilities. 307 | with self.connection.cursor() as cursor: 308 | cursor.execute("SELECT 1") 309 | except Database.Error: 310 | return False 311 | else: 312 | return True 313 | 314 | @contextmanager 315 | def _nodb_cursor(self): 316 | cursor = None 317 | try: 318 | with super()._nodb_cursor() as cursor: 319 | yield cursor 320 | except (Database.DatabaseError, WrappedDatabaseError): 321 | if cursor is not None: 322 | raise 323 | warnings.warn( 324 | "Normally Django will use a connection to the 'postgres' database " 325 | "to avoid running initialization queries against the production " 326 | "database when it's not needed (for example, when running tests). 
" 327 | "Django was unable to create a connection to the 'postgres' database " 328 | "and will use the first PostgreSQL database instead.", 329 | RuntimeWarning, 330 | ) 331 | for connection in connections.all(): 332 | if ( 333 | connection.vendor == "postgresql" 334 | and connection.settings_dict["NAME"] != "postgres" 335 | ): 336 | conn = self.__class__( 337 | { 338 | **self.settings_dict, 339 | "NAME": connection.settings_dict["NAME"], 340 | }, 341 | alias=self.alias, 342 | ) 343 | try: 344 | with conn.cursor() as cursor: 345 | yield cursor 346 | finally: 347 | conn.close() 348 | break 349 | else: 350 | raise 351 | 352 | @cached_property 353 | def pg_version(self): 354 | with self.temporary_connection(): 355 | return self.connection.server_version 356 | 357 | def make_debug_cursor(self, cursor): 358 | return CursorDebugWrapper(cursor, self) 359 | 360 | 361 | class CursorDebugWrapper(BaseCursorDebugWrapper): 362 | def copy_expert(self, sql, file, *args): 363 | with self.debug_sql(sql): 364 | return self.cursor.copy_expert(sql, file, *args) 365 | 366 | def copy_to(self, file, table, *args, **kwargs): 367 | with self.debug_sql(sql="COPY %s TO STDOUT" % table): 368 | return self.cursor.copy_to(file, table, *args, **kwargs) 369 | --------------------------------------------------------------------------------