├── .circleci └── config.yml ├── .editorconfig ├── .gitignore ├── .readthedocs.yml ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── Makefile ├── README.md ├── devops.py ├── docker-compose.yml ├── docs ├── advanced_db.md ├── advanced_installation.md ├── basics.md ├── commands.md ├── conditional.md ├── contributing.md ├── cookbook.md ├── css │ ├── mkdocs-material.css │ └── mkdocstrings.css ├── deferrable.md ├── faq.md ├── further_reading.md ├── ignoring_triggers.md ├── index.md ├── installation.md ├── module.md ├── overrides │ └── partials │ │ └── copyright.html ├── release_notes.md ├── requirements.txt ├── settings.md ├── statement.md ├── static │ ├── dark_logo.png │ └── light_logo.png ├── troubleshooting.md └── upgrading.md ├── environment.yml ├── footing.yaml ├── manage.py ├── mkdocs.yml ├── pgtrigger ├── __init__.py ├── apps.py ├── compiler.py ├── contrib.py ├── core.py ├── features.py ├── installation.py ├── management │ ├── __init__.py │ └── commands │ │ ├── __init__.py │ │ └── pgtrigger.py ├── migrations.py ├── models.py ├── py.typed ├── registry.py ├── runtime.py ├── tests │ ├── __init__.py │ ├── apps.py │ ├── conftest.py │ ├── migrations │ │ ├── 0001_initial.py │ │ ├── 0002_logentry_tologmodel.py │ │ ├── 0003_auto_20200718_0938.py │ │ ├── 0004_fsm.py │ │ ├── 0005_customsoftdelete.py │ │ ├── 0006_customtablename.py │ │ ├── 0007_auto_20220808_1055.py │ │ ├── 0008_searchmodel.py │ │ ├── 0009_orderschema_receiptschema.py │ │ ├── 0010_auto_20220817_2211.py │ │ ├── 0011_auto_20220817_2211.py │ │ ├── 0012_alter_partitionmodel_options.py │ │ ├── 0013_alter_testtrigger_m2m_field_changedcondition.py │ │ ├── 0014_softdeletecompositepk.py │ │ ├── 0015_concretechild_abstractchild.py │ │ └── __init__.py │ ├── models.py │ ├── syncdb_app │ │ ├── __init__.py │ │ ├── apps.py │ │ └── models.py │ ├── test_commands.py │ ├── test_contrib.py │ ├── test_core.py │ ├── test_migrations.py │ ├── test_multi_db.py │ ├── test_multi_schema.py │ ├── test_registry.py │ ├── test_runtime.py │ ├── test_syncdb.py │ └── utils.py ├── utils.py └── version.py ├── poetry.lock ├── pyproject.toml ├── settings.py ├── test_settings.py └── tox.ini /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2.1 2 | 3 | orbs: 4 | ambitioneng: 5 | executors: 6 | python: 7 | working_directory: /code 8 | docker: 9 | - image: opus10/circleci-python-library:2024-10-26 10 | environment: 11 | # Ensure makefile commands are not wrapped in "docker compose run" 12 | EXEC_WRAPPER: '' 13 | DATABASE_URL: postgres://root@localhost/circle_test?sslmode=disable 14 | - image: cimg/postgres:<<parameters.pg_version>> 15 | environment: 16 | POSTGRES_USER: root 17 | POSTGRES_DB: circle_test 18 | POSTGRES_PASSWORD: password 19 | parameters: 20 | pg_version: 21 | type: "string" 22 | default: "14.17" 23 | commands: 24 | test: 25 | steps: 26 | - checkout 27 | - restore_cache: 28 | key: v5-{{ checksum "poetry.lock" }} 29 | - run: make dependencies 30 | - run: make full-test-suite 31 | - save_cache: 32 | key: v5-{{ checksum "poetry.lock" }} 33 | paths: 34 | - /home/circleci/.cache/pypoetry/ 35 | - /code/.venv 36 | - /code/.tox 37 | 38 | jobs: 39 | test_pg_min: 40 | executor: 41 | name: ambitioneng/python 42 | pg_version: "14.17" 43 | steps: 44 | - ambitioneng/test 45 | 46 | test_pg_max: 47 | executor: 48 | name: ambitioneng/python 49 | pg_version: "17.4" 50 | steps: 51 | - ambitioneng/test 52 | 53 | lint: 54 | executor: ambitioneng/python 55 | steps: 56 | - checkout 57 | - restore_cache: 58 | key: v5-{{ checksum
"poetry.lock" }} 59 | - run: make dependencies 60 | - run: make lint 61 | 62 | type_check: 63 | executor: ambitioneng/python 64 | steps: 65 | - checkout 66 | - restore_cache: 67 | key: v5-{{ checksum "poetry.lock" }} 68 | - run: make dependencies 69 | - run: make type-check || true 70 | 71 | deploy: 72 | executor: ambitioneng/python 73 | steps: 74 | - checkout 75 | - run: ssh-add -D 76 | - restore_cache: 77 | key: v5-{{ checksum "poetry.lock" }} 78 | - run: make dependencies 79 | - run: poetry run python devops.py deploy 80 | 81 | workflows: 82 | version: 2 83 | on_commit: 84 | jobs: 85 | - test_pg_min: 86 | filters: 87 | tags: 88 | only: /.*/ 89 | - test_pg_max: 90 | filters: 91 | tags: 92 | only: /.*/ 93 | - lint: 94 | filters: 95 | tags: 96 | only: /.*/ 97 | - type_check: 98 | filters: 99 | tags: 100 | only: /.*/ 101 | - deploy: 102 | context: python-library 103 | requires: 104 | - test_pg_min 105 | - test_pg_max 106 | - lint 107 | - type_check 108 | filters: 109 | branches: 110 | ignore: /.*/ 111 | tags: 112 | only: /.*/ 113 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | charset = utf-8 5 | end_of_line = lf 6 | indent_size = 4 7 | indent_style = space 8 | insert_final_newline = true 9 | trim_trailing_whitespace = true 10 | 11 | [*.{yaml,yml}] 12 | indent_size = 2 13 | 14 | [makefile] 15 | indent_style = tab 16 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/vim,osx,python,django,pycharm,komodoedit,elasticbeanstalk,visualstudiocode 3 | # Edit at https://www.gitignore.io/?templates=vim,osx,python,django,pycharm,komodoedit,elasticbeanstalk,visualstudiocode 4 | 5 | ### Django ### 6 | *.log 7 | *.pot 8 | *.pyc 9 | __pycache__/ 10 | local_settings.py 11 | db.sqlite3 12 | media 13 | 14 | # If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/ 15 | # in your Git repository. Update and uncomment the following line accordingly. 16 | # /staticfiles/ 17 | 18 | ### Django.Python Stack ### 19 | # Byte-compiled / optimized / DLL files 20 | *.py[cod] 21 | *$py.class 22 | 23 | # C extensions 24 | *.so 25 | 26 | # Distribution / packaging 27 | .Python 28 | build/ 29 | develop-eggs/ 30 | dist/ 31 | downloads/ 32 | eggs/ 33 | .eggs/ 34 | lib/ 35 | lib64/ 36 | parts/ 37 | sdist/ 38 | var/ 39 | wheels/ 40 | pip-wheel-metadata/ 41 | share/python-wheels/ 42 | *.egg-info/ 43 | .installed.cfg 44 | *.egg 45 | MANIFEST 46 | 47 | # PyInstaller 48 | # Usually these files are written by a python script from a template 49 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
50 | *.manifest 51 | *.spec 52 | 53 | # Installer logs 54 | pip-log.txt 55 | pip-delete-this-directory.txt 56 | 57 | # Unit test / coverage reports 58 | htmlcov/ 59 | .tox/ 60 | .nox/ 61 | .coverage 62 | .coverage.* 63 | .cache 64 | nosetests.xml 65 | coverage.xml 66 | *.cover 67 | .hypothesis/ 68 | .pytest_cache/ 69 | 70 | # Translations 71 | *.mo 72 | 73 | # Django stuff: 74 | db.sqlite3-journal 75 | 76 | # Flask stuff: 77 | instance/ 78 | .webassets-cache 79 | 80 | # Scrapy stuff: 81 | .scrapy 82 | 83 | # PyBuilder 84 | target/ 85 | 86 | # Jupyter Notebook 87 | .ipynb_checkpoints 88 | 89 | # IPython 90 | profile_default/ 91 | ipython_config.py 92 | 93 | # pyenv 94 | .python-version 95 | 96 | # pipenv 97 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 98 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 99 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 100 | # install all needed dependencies. 101 | #Pipfile.lock 102 | 103 | # celery beat schedule file 104 | celerybeat-schedule 105 | 106 | # SageMath parsed files 107 | *.sage.py 108 | 109 | # Environments 110 | .env 111 | .venv 112 | env/ 113 | venv/ 114 | ENV/ 115 | env.bak/ 116 | venv.bak/ 117 | 118 | # Spyder project settings 119 | .spyderproject 120 | .spyproject 121 | 122 | # Rope project settings 123 | .ropeproject 124 | 125 | # mkdocs documentation 126 | /site 127 | 128 | # mypy 129 | .mypy_cache/ 130 | .dmypy.json 131 | dmypy.json 132 | 133 | # Pyre type checker 134 | .pyre/ 135 | 136 | ### ElasticBeanstalk ### 137 | .elasticbeanstalk/ 138 | 139 | ### KomodoEdit ### 140 | *.komodoproject 141 | .komodotools 142 | 143 | ### OSX ### 144 | # General 145 | .DS_Store 146 | .AppleDouble 147 | .LSOverride 148 | 149 | # Icon must end with two \r 150 | Icon 151 | 152 | # Thumbnails 153 | ._* 154 | 155 | # Files that might appear in the root of a volume 156 | .DocumentRevisions-V100 157 | .fseventsd 158 | .Spotlight-V100 159 | .TemporaryItems 160 | .Trashes 161 | .VolumeIcon.icns 162 | .com.apple.timemachine.donotpresent 163 | 164 | # Directories potentially created on remote AFP share 165 | .AppleDB 166 | .AppleDesktop 167 | Network Trash Folder 168 | Temporary Items 169 | .apdisk 170 | 171 | ### PyCharm ### 172 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm 173 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 174 | 175 | # User-specific stuff 176 | .idea/**/workspace.xml 177 | .idea/**/tasks.xml 178 | .idea/**/usage.statistics.xml 179 | .idea/**/dictionaries 180 | .idea/**/shelf 181 | 182 | # Generated files 183 | .idea/**/contentModel.xml 184 | 185 | # Sensitive or high-churn files 186 | .idea/**/dataSources/ 187 | .idea/**/dataSources.ids 188 | .idea/**/dataSources.local.xml 189 | .idea/**/sqlDataSources.xml 190 | .idea/**/dynamic.xml 191 | .idea/**/uiDesigner.xml 192 | .idea/**/dbnavigator.xml 193 | 194 | # Gradle 195 | .idea/**/gradle.xml 196 | .idea/**/libraries 197 | 198 | # Gradle and Maven with auto-import 199 | # When using Gradle or Maven with auto-import, you should exclude module files, 200 | # since they will be recreated, and may cause churn. Uncomment if using 201 | # auto-import. 
202 | # .idea/modules.xml 203 | # .idea/*.iml 204 | # .idea/modules 205 | # *.iml 206 | # *.ipr 207 | 208 | # CMake 209 | cmake-build-*/ 210 | 211 | # Mongo Explorer plugin 212 | .idea/**/mongoSettings.xml 213 | 214 | # File-based project format 215 | *.iws 216 | 217 | # IntelliJ 218 | out/ 219 | 220 | # mpeltonen/sbt-idea plugin 221 | .idea_modules/ 222 | 223 | # JIRA plugin 224 | atlassian-ide-plugin.xml 225 | 226 | # Cursive Clojure plugin 227 | .idea/replstate.xml 228 | 229 | # Crashlytics plugin (for Android Studio and IntelliJ) 230 | com_crashlytics_export_strings.xml 231 | crashlytics.properties 232 | crashlytics-build.properties 233 | fabric.properties 234 | 235 | # Editor-based Rest Client 236 | .idea/httpRequests 237 | 238 | # Android studio 3.1+ serialized cache file 239 | .idea/caches/build_file_checksums.ser 240 | 241 | ### PyCharm Patch ### 242 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 243 | 244 | # *.iml 245 | # modules.xml 246 | # .idea/misc.xml 247 | # *.ipr 248 | 249 | # Sonarlint plugin 250 | .idea/sonarlint 251 | 252 | ### Python ### 253 | # Byte-compiled / optimized / DLL files 254 | 255 | # C extensions 256 | 257 | # Distribution / packaging 258 | 259 | # PyInstaller 260 | # Usually these files are written by a python script from a template 261 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 262 | 263 | # Installer logs 264 | 265 | # Unit test / coverage reports 266 | 267 | # Translations 268 | 269 | # Django stuff: 270 | 271 | # Flask stuff: 272 | 273 | # Scrapy stuff: 274 | 275 | # Sphinx documentation 276 | 277 | # PyBuilder 278 | 279 | # Jupyter Notebook 280 | 281 | # IPython 282 | 283 | # pyenv 284 | 285 | # pipenv 286 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 287 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 288 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 289 | # install all needed dependencies. 
290 | 291 | # celery beat schedule file 292 | 293 | # SageMath parsed files 294 | 295 | # Environments 296 | 297 | # Spyder project settings 298 | 299 | # Rope project settings 300 | 301 | # mkdocs documentation 302 | 303 | # mypy 304 | 305 | # Pyre type checker 306 | 307 | ### Vim ### 308 | # Swap 309 | [._]*.s[a-v][a-z] 310 | [._]*.sw[a-p] 311 | [._]s[a-rt-v][a-z] 312 | [._]ss[a-gi-z] 313 | [._]sw[a-p] 314 | 315 | # Session 316 | Session.vim 317 | Sessionx.vim 318 | 319 | # Temporary 320 | .netrwhist 321 | *~ 322 | # Auto-generated tag files 323 | tags 324 | # Persistent undo 325 | [._]*.un~ 326 | 327 | ### VisualStudioCode ### 328 | .vscode/* 329 | !.vscode/settings.json 330 | !.vscode/tasks.json 331 | !.vscode/launch.json 332 | !.vscode/extensions.json 333 | 334 | ### VisualStudioCode Patch ### 335 | # Ignore all local history of files 336 | .history 337 | 338 | # End of https://www.gitignore.io/api/vim,osx,python,django,pycharm,komodoedit,elasticbeanstalk,visualstudiocode 339 | 340 | # Ignore custom Docker compose DB data 341 | .db 342 | 343 | # Ignore PyCharm 344 | .idea/ 345 | 346 | # Ignore local poetry settings 347 | poetry.toml 348 | 349 | # Ignore PyCharm idea folder 350 | .idea 351 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | build: 3 | os: ubuntu-22.04 4 | tools: 5 | python: "3.9" 6 | mkdocs: 7 | configuration: mkdocs.yml 8 | fail_on_warning: false 9 | formats: all 10 | python: 11 | install: 12 | - requirements: docs/requirements.txt 13 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guide 2 | 3 | This project was created using footing. For more information about footing, go to the [footing docs](https://github.com/AmbitionEng/footing). 4 | 5 | ## Setup 6 | 7 | Set up your development environment with: 8 | 9 | git clone git@github.com:AmbitionEng/django-pgtrigger.git 10 | cd django-pgtrigger 11 | make docker-setup 12 | 13 | `make docker-setup` will set up a development environment managed by Docker. Install docker [here](https://www.docker.com/get-started) and be sure it is running when executing any of the commands below. 14 | 15 | If you prefer a native development environment, `make conda-setup` will set up a development environment managed by [Conda](https://conda.io). Dependent services, such as databases, must be run manually. 16 | 17 | ## Testing and Validation 18 | 19 | Run the tests on one Python version with: 20 | 21 | make test 22 | 23 | Run the full test suite against all supported Python versions with: 24 | 25 | make full-test-suite 26 | 27 | Validate the code with: 28 | 29 | make lint 30 | 31 | If your code fails the linter checks, fix common errors with: 32 | 33 | make lint-fix 34 | 35 | ## Documentation 36 | 37 | [Mkdocs Material](https://squidfunk.github.io/mkdocs-material/) documentation can be built with: 38 | 39 | make docs 40 | 41 | A shortcut for serving them is: 42 | 43 | make docs-serve 44 | 45 | ## Releases and Versioning 46 | 47 | The version number and release notes are manually updated by the maintainer during the release process. Do not edit these.
-------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2025, Ambition 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright 8 | notice, this list of conditions and the following disclaimer. 9 | 10 | * Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in the 12 | documentation and/or other materials provided with the distribution. 13 | 14 | * Neither the name of the copyright holder nor the names of its 15 | contributors may be used to endorse or promote products derived from 16 | this software without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 19 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 20 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. IN NO EVENT SHALL AMBITION BE LIABLE FOR ANY 22 | DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 23 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 24 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 25 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 27 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for packaging and testing django-pgtrigger 2 | # 3 | # This Makefile has the following targets: 4 | # 5 | # setup - Sets up the development environment 6 | # dependencies - Installs dependencies 7 | # docs - Build documentation 8 | # docs-serve - Serve documentation 9 | # lint - Run code linting and static checks 10 | # lint-fix - Fix common linting errors 11 | # type-check - Run Pyright type-checking 12 | # test - Run tests using pytest 13 | # full-test-suite - Run full test suite using tox 14 | # shell - Run a shell in a virtualenv 15 | # docker-teardown - Spin down docker resources 16 | 17 | OS = $(shell uname -s) 18 | 19 | PACKAGE_NAME=django-pgtrigger 20 | MODULE_NAME=pgtrigger 21 | SHELL=bash 22 | DATABASE_URL?=postgres://postgres:postgres@db:5432/postgres 23 | 24 | ifeq (${OS}, Linux) 25 | DOCKER_CMD?=sudo docker 26 | DOCKER_RUN_ARGS?=-v /home:/home -v $(shell pwd):/code -e EXEC_WRAPPER="" -u "$(shell id -u):$(shell id -g)" -v /etc/passwd:/etc/passwd 27 | # The user can be passed to docker exec commands in Linux. 
28 | # For example, "make shell user=root" for access to apt-get commands 29 | user?=$(shell id -u) 30 | group?=$(shell id ${user} -g) 31 | EXEC_WRAPPER?=$(DOCKER_CMD) exec --user="$(user):$(group)" -it $(PACKAGE_NAME) 32 | else ifeq (${OS}, Darwin) 33 | DOCKER_CMD?=docker 34 | DOCKER_RUN_ARGS?=-v ~/:/home/circleci -v $(shell pwd):/code -e EXEC_WRAPPER="" 35 | EXEC_WRAPPER?=$(DOCKER_CMD) exec -it $(PACKAGE_NAME) 36 | endif 37 | 38 | # Docker run mounts the local code directory, SSH (for git), and global git config information 39 | DOCKER_RUN_CMD?=$(DOCKER_CMD) compose run --name $(PACKAGE_NAME) $(DOCKER_RUN_ARGS) -d app 40 | 41 | # Print usage of main targets when user types "make" or "make help" 42 | .PHONY: help 43 | help: 44 | ifndef run 45 | @echo "Please choose one of the following targets: \n"\ 46 | " docker-setup: Setup Docker development environment\n"\ 47 | " conda-setup: Setup Conda development environment\n"\ 48 | " lock: Lock dependencies\n"\ 49 | " dependencies: Install dependencies\n"\ 50 | " shell: Start a shell\n"\ 51 | " test: Run tests\n"\ 52 | " tox: Run tests against all versions of Python\n"\ 53 | " lint: Run code linting and static checks\n"\ 54 | " lint-fix: Fix common linting errors\n"\ 55 | " type-check: Run Pyright type-checking\n"\ 56 | " docs: Build documentation\n"\ 57 | " docs-serve: Serve documentation\n"\ 58 | " docker-teardown: Spin down docker resources\n"\ 59 | "\n"\ 60 | "View the Makefile for more documentation" 61 | @exit 2 62 | else 63 | $(EXEC_WRAPPER) $(run) 64 | endif 65 | 66 | 67 | # Pull the latest container and start a detached run 68 | .PHONY: docker-start 69 | docker-start: 70 | $(DOCKER_CMD) compose pull 71 | $(DOCKER_RUN_CMD) 72 | 73 | 74 | # Lock dependencies 75 | .PHONY: lock 76 | lock: 77 | $(EXEC_WRAPPER) poetry lock --no-update 78 | $(EXEC_WRAPPER) poetry export --with dev --without-hashes -f requirements.txt > docs/requirements.txt 79 | 80 | 81 | # Install dependencies 82 | .PHONY: dependencies 83 | dependencies: 84 | mkdir -p .venv 85 | $(EXEC_WRAPPER) poetry install --no-ansi 86 | 87 | 88 | .PHONY: multi-db-setup 89 | multi-db-setup: 90 | -$(DOCKER_EXEC_WRAPPER) psql $(DATABASE_URL) -c "CREATE DATABASE ${MODULE_NAME}_local_other WITH TEMPLATE ${MODULE_NAME}_local" 91 | $(DOCKER_EXEC_WRAPPER) psql $(DATABASE_URL) -c "CREATE SCHEMA IF NOT EXISTS \"order\"" 92 | $(DOCKER_EXEC_WRAPPER) psql $(DATABASE_URL) -c "CREATE SCHEMA IF NOT EXISTS receipt;" 93 | 94 | 95 | # Sets up the local database 96 | .PHONY: db-setup 97 | db-setup: 98 | -psql postgres -c "CREATE USER postgres;" 99 | -psql postgres -c "ALTER USER postgres SUPERUSER;" 100 | -psql postgres -c "CREATE DATABASE ${MODULE_NAME}_local OWNER postgres;" 101 | -psql postgres -c "GRANT ALL PRIVILEGES ON DATABASE ${MODULE_NAME}_local to postgres;" 102 | $(EXEC_WRAPPER) python manage.py migrate 103 | 104 | 105 | # Sets up a conda development environment 106 | .PHONY: conda-create 107 | conda-create: 108 | -conda env create -f environment.yml -y 109 | $(EXEC_WRAPPER) poetry config virtualenvs.create false --local 110 | 111 | 112 | # Sets up a Conda development environment 113 | .PHONY: conda-setup 114 | conda-setup: EXEC_WRAPPER=conda run -n ${PACKAGE_NAME} --no-capture-output 115 | conda-setup: conda-create lock dependencies db-setup 116 | 117 | 118 | # Sets up a Docker development environment 119 | .PHONY: docker-setup 120 | docker-setup: docker-teardown docker-start lock dependencies 121 | 122 | 123 | # Spin down docker resources 124 | .PHONY: docker-teardown 125 | docker-teardown: 126 |
$(DOCKER_CMD) compose down --remove-orphans 127 | 128 | 129 | # Run a shell 130 | .PHONY: shell 131 | shell: 132 | $(EXEC_WRAPPER) /bin/bash 133 | 134 | 135 | # Run pytest 136 | .PHONY: test 137 | test: 138 | $(EXEC_WRAPPER) pytest 139 | 140 | 141 | # Run full test suite 142 | .PHONY: full-test-suite 143 | full-test-suite: 144 | $(EXEC_WRAPPER) tox 145 | 146 | 147 | # Build documentation 148 | .PHONY: docs 149 | docs: 150 | $(EXEC_WRAPPER) mkdocs build -s 151 | 152 | 153 | # Serve documentation 154 | .PHONY: docs-serve 155 | docs-serve: 156 | $(EXEC_WRAPPER) mkdocs serve 157 | 158 | 159 | # Run code linting and static analysis. Ensure docs can be built 160 | .PHONY: lint 161 | lint: 162 | $(EXEC_WRAPPER) ruff format . --check 163 | $(EXEC_WRAPPER) ruff check ${MODULE_NAME} 164 | $(EXEC_WRAPPER) bash -c 'make docs' 165 | $(EXEC_WRAPPER) diff <(poetry export --with dev --without-hashes -f requirements.txt) docs/requirements.txt >/dev/null 2>&1 || exit 1 166 | 167 | 168 | # Fix common linting errors 169 | .PHONY: lint-fix 170 | lint-fix: 171 | $(EXEC_WRAPPER) ruff format . 172 | $(EXEC_WRAPPER) ruff check ${MODULE_NAME} --fix 173 | 174 | 175 | # Run Pyright type-checking 176 | .PHONY: type-check 177 | type-check: 178 | $(EXEC_WRAPPER) pyright $(MODULE_NAME) 179 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # django-pgtrigger 2 | 3 | `django-pgtrigger` helps you write [Postgres triggers](https://www.postgresql.org/docs/current/sql-createtrigger.html) for your Django models. 4 | 5 | ## Why should I use triggers? 6 | 7 | Triggers can solve a variety of complex problems more reliably, performantly, and succinctly than application code. 8 | For example, 9 | 10 | * Protecting operations on rows or columns (`pgtrigger.Protect`). 11 | * Making read-only models or fields (`pgtrigger.ReadOnly`). 12 | * Soft-deleting models (`pgtrigger.SoftDelete`). 13 | * Snapshotting and tracking model changes ([django-pghistory](https://django-pghistory.readthedocs.io/)). 14 | * Enforcing field transitions (`pgtrigger.FSM`). 15 | * Keeping a search vector updated for full-text search (`pgtrigger.UpdateSearchVector`). 16 | * Building official interfaces (e.g. enforcing use of `User.objects.create_user` and not `User.objects.create`). 17 | * Versioning models, mirroring fields, computing unique model hashes, and the list goes on... 18 | 19 | All of these examples require no overridden methods, no base models, and no signal handling. 20 | 21 | ## Quick start 22 | 23 | Install `django-pgtrigger` with `pip3 install django-pgtrigger` and add `pgtrigger` to `settings.INSTALLED_APPS`. 24 | 25 | `pgtrigger.Trigger` objects are added to `triggers` in model `Meta`. `django-pgtrigger` comes with several trigger classes, such as `pgtrigger.Protect`. In the following, we're protecting the model from being deleted: 26 | 27 | ```python 28 | import pgtrigger 29 | 30 | class ProtectedModel(models.Model): 31 | """This model cannot be deleted!""" 32 | 33 | class Meta: 34 | triggers = [ 35 | pgtrigger.Protect(name="protect_deletes", operation=pgtrigger.Delete) 36 | ] 37 | ``` 38 | 39 | When migrations are created and executed, `ProtectedModel` will raise an exception anytime a deletion is attempted. 40 | 41 | Let's extend this example further and only protect deletions on inactive objects. 
In this example, the trigger conditionally runs when the row being deleted (the `OLD` row in trigger terminology) is still active: 42 | 43 | ```python 44 | import pgtrigger 45 | 46 | class ProtectedModel(models.Model): 47 | """Active object cannot be deleted!""" 48 | is_active = models.BooleanField(default=True) 49 | 50 | class Meta: 51 | triggers = [ 52 | pgtrigger.Protect( 53 | name="protect_deletes", 54 | operation=pgtrigger.Delete, 55 | condition=pgtrigger.Q(old__is_active=True) 56 | ) 57 | ] 58 | ``` 59 | 60 | `django-pgtrigger` uses `pgtrigger.Q` and `pgtrigger.F` objects to conditionally execute triggers based on the `OLD` and `NEW` rows. Combining these Django idioms with `pgtrigger.Trigger` objects can solve a wide variety of problems without ever writing SQL. Users, however, can still use raw SQL for complex cases. 61 | 62 | Triggers are installed like other database objects. Run `python manage.py makemigrations` and `python manage.py migrate` to install triggers. 63 | 64 | If triggers are new to you, don't worry. The [pgtrigger docs](https://django-pgtrigger.readthedocs.io/) cover triggers in more detail and provide many examples. 65 | 66 | ## Compatibility 67 | 68 | `django-pgtrigger` is compatible with Python 3.9 - 3.13, Django 4.2 - 5.2, Psycopg 2 - 3, and Postgres 14 - 17. 69 | 70 | ## Documentation 71 | 72 | [View the django-pgtrigger docs here](https://django-pgtrigger.readthedocs.io/) to learn more about: 73 | 74 | * Trigger basics and motivation for using triggers. 75 | * How to use the built-in triggers and how to build custom ones. 76 | * Installing triggers on third-party models, many-to-many fields, and other advanced scenarios. 77 | * Writing conditional triggers. 78 | * Ignoring triggers dynamically and deferring trigger execution. 79 | * Multiple database, schema, and partitioning support. 80 | * Frequently asked questions, common issues, and upgrading. 81 | * The commands, settings, and module. 82 | 83 | ## Installation 84 | 85 | Install `django-pgtrigger` with: 86 | 87 | pip3 install django-pgtrigger 88 | After this, add `pgtrigger` to the `INSTALLED_APPS` setting of your Django project. 89 | 90 | ## Other Material 91 | 92 | After you've read the docs, check out [this tutorial](https://wesleykendall.github.io/django-pgtrigger-tutorial/) with interactive examples from a Django meetup talk. 93 | 94 | The [DjangoCon 2021 talk](https://www.youtube.com/watch?v=Tte3d4JjxCk) also breaks down triggers and shows several examples. 95 | 96 | ## Contributing Guide 97 | 98 | For information on setting up django-pgtrigger for development and contributing changes, view [CONTRIBUTING.md](CONTRIBUTING.md). 99 | 100 | ## Creators 101 | 102 | - [Wes Kendall](https://github.com/wesleykendall) 103 | 104 | ## Other Contributors 105 | 106 | - @jzmiller1 107 | - @rrauenza 108 | - @ralokt 109 | - @adamchainz 110 | - @danifus 111 | - @kekekekule 112 | - @peterthomassen 113 | - @pfouque 114 | -------------------------------------------------------------------------------- /devops.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | """ 4 | Devops functions for this package. Includes functions for automated 5 | package deployment, changelog generation, and changelog checking. 6 | 7 | This script is generated by the template at 8 | https://github.com/AmbitionEng/python-library-template 9 | 10 | Do not change this script! 
Any fixes or updates to this script should be made 11 | to https://github.com/AmbitionEng/python-library-template 12 | """ 13 | 14 | import os 15 | import subprocess 16 | import sys 17 | from typing import IO, Any, TypeAlias, Union 18 | 19 | 20 | File: TypeAlias = Union[IO[Any], int, None] 21 | 22 | 23 | def _shell( 24 | cmd: str, 25 | check: bool = True, 26 | stdin: File = None, 27 | stdout: File = None, 28 | stderr: File = None, 29 | ): # pragma: no cover 30 | """Runs a subprocess shell with check=True by default""" 31 | return subprocess.run(cmd, shell=True, check=check, stdin=stdin, stdout=stdout, stderr=stderr) 32 | 33 | 34 | def _publish_to_pypi() -> None: 35 | """ 36 | Uses poetry to publish to pypi 37 | """ 38 | if "PYPI_USERNAME" not in os.environ or "PYPI_PASSWORD" not in os.environ: 39 | raise RuntimeError("Must set PYPI_USERNAME and PYPI_PASSWORD env vars") 40 | 41 | _shell("poetry config http-basic.pypi ${PYPI_USERNAME} ${PYPI_PASSWORD}") 42 | _shell("poetry build") 43 | _shell("poetry publish -vvv -n", stdout=subprocess.PIPE) 44 | 45 | 46 | def deploy() -> None: 47 | """Deploys the package and uploads documentation.""" 48 | # Ensure proper environment 49 | if not os.environ.get("CIRCLECI"): # pragma: no cover 50 | raise RuntimeError("Must be on CircleCI to run this script") 51 | 52 | _publish_to_pypi() 53 | 54 | print("Deployment complete.") 55 | 56 | 57 | if __name__ == "__main__": 58 | if sys.argv[-1] == "deploy": 59 | deploy() 60 | else: 61 | raise RuntimeError(f'Invalid subcommand "{sys.argv[-1]}"') 62 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.3" 2 | 3 | services: 4 | db: 5 | image: cimg/postgres:14.4 6 | volumes: 7 | - ./.db:/var/lib/postgresql/data 8 | environment: 9 | - POSTGRES_NAME=postgres 10 | - POSTGRES_USER=postgres 11 | - POSTGRES_PASSWORD=postgres 12 | app: 13 | image: opus10/circleci-python-library 14 | environment: 15 | - DATABASE_URL=postgres://postgres:postgres@db:5432/postgres 16 | depends_on: 17 | - db 18 | -------------------------------------------------------------------------------- /docs/advanced_db.md: -------------------------------------------------------------------------------- 1 | # Non-Standard Database Setups 2 | 3 | Here we cover details about non-standard database setups that might impact how you use triggers. 4 | 5 | ## Multiple Databases 6 | 7 | Triggers are migrated for multiple databases just like models. If you define a custom router, triggers will be installed based on `allow_migrate`. See [the Django docs on multiple databases](https://docs.djangoproject.com/en/4.1/topics/db/multi-db/) for more info. 8 | 9 | !!! warning 10 | 11 | If you migrate triggers and afterwards change the behavior of the router's `allow_migrate`, you risk having orphaned triggers installed on tables. 12 | 13 | The management commands and core installation functions work the same way, targeting an individual database like Django's `migrate` command. Each command can be supplied with a `-d` or `--database` option. 14 | 15 | For example, `python manage.py pgtrigger install --database other` will install all of the triggers on the `other` database. 16 | 17 | If `allow_migrate` ignores a particular model for a database, the installation status will show as `UNALLOWED` when using `python manage.py pgtrigger ls`. 18 | 19 | !!!
note 20 | 21 | If you've configured `settings.PGTRIGGER_INSTALL_ON_MIGRATE`, triggers will be installed for the same database as the `migrate` command. 22 | 23 | Dynamic runtime functions [pgtrigger.ignore][], [pgtrigger.schema][], and [pgtrigger.constraints][] operate on all postgres databases at once unless the `databases` argument is provided. 24 | 25 | ## Schemas 26 | 27 | There are two common ways of using Postgres schemas in Django, both of which work with `django-pgtrigger`: 28 | 29 | 1. Create a database in `settings.DATABASES` for each schema, configuring the `search_path` in the `OPTIONS`. 30 | 2. Use an app like [django-tenants](https://github.com/django-tenants/django-tenants) to dynamically set the `search_path` for a single database. 31 | 32 | When using the first approach, use the multi-database support detailed in the previous section. For the second approach, `django-pgtrigger` comes with the following functionality to dynamically set the `search_path`: 33 | 34 | 1. Pass `--schema` (`-s`) arguments for management commands. For example, this sets `search_path` to `myschema,public` and shows trigger installation status relative to those schemas: 35 | 36 | ``` 37 | python manage.py pgtrigger ls -s myschema -s public 38 | ``` 39 | 40 | 2. Programmatically set the search path with [pgtrigger.schema][]. For example, this sets the `search_path` to `myschema,public`: 41 | 42 | ```python 43 | with pgtrigger.schema("myschema", "public"): 44 | # search_path is set to "myschema,public". Any nested invocations of 45 | # pgtrigger.schema will append to the path if not currently 46 | # present 47 | ``` 48 | 49 | !!! note 50 | 51 | If you find yourself wrapping the `django-pgtrigger` API with [pgtrigger.schema][], open an issue and let us know about your use case. We may consider making it a first-class citizen in the API if it's common. 52 | 53 | The final thing to keep in mind with multi-schema support is that [pgtrigger.ignore][] uses a special Postgres function for ignoring triggers that's installed under the public schema. The function is always referenced with a fully-qualified name. 54 | 55 | If you don't use the public schema, configure the schema with `settings.PGTRIGGER_SCHEMA`. Setting this to `None` uses a relative path when installing and calling the function. 56 | 57 | ## Partitions 58 | 59 | `django-pgtrigger` supports tables that use [Postgres table partitioning](https://www.postgresql.org/docs/current/ddl-partitioning.html) with no additional configuration. 60 | 61 | !!! note 62 | 63 | Row-level triggers are only available for partitioned tables in Postgres 13 and above. Triggers cannot be installed or uninstalled on a per-partition basis. Installing a trigger on a partitioned table installs it for all partitions. -------------------------------------------------------------------------------- /docs/advanced_installation.md: -------------------------------------------------------------------------------- 1 | # Installation Edge Cases 2 | 3 | ## Third-party models 4 | 5 | Install triggers on third-party models by declaring them on a proxy model.
For example, here we protect Django's `User` model from being deleted: 6 | 7 | ```python 8 | class UserProxy(User): 9 | class Meta: 10 | proxy = True 11 | triggers = [ 12 | pgtrigger.Protect(name='protect_deletes', operation=pgtrigger.Delete) 13 | ] 14 | ``` 15 | 16 | ## Default many-to-many "through" models 17 | 18 | Similar to third-party models, we can also install triggers against default many-to-many "through" models by using a proxy model. Here we protect Django `User` group relationships from being deleted: 19 | 20 | ```python 21 | class UserGroupTriggers(User.groups.through): 22 | class Meta: 23 | proxy = True 24 | triggers = [ 25 | pgtrigger.Protect(name='protect_deletes', operation=pgtrigger.Delete) 26 | ] 27 | ``` 28 | 29 | !!! warning 30 | 31 | Django doesn't fully support making proxy models from default through relationships. Reversing migrations can sometimes throw `InvalidBases` errors. We recommend creating a custom through model when possible. See the [Django docs on making custom "through" models](https://docs.djangoproject.com/en/4.0/topics/db/models/#extra-fields-on-many-to-many-relationships). 32 | 33 | ## Declaring triggers in base models 34 | 35 | Triggers can be declared in an abstract model and inherited. Here is a base model for soft-delete models: 36 | 37 | ```python 38 | class BaseSoftDelete(models.Model): 39 | is_active = models.BooleanField(default=True) 40 | 41 | class Meta: 42 | abstract = True 43 | triggers = [pgtrigger.SoftDelete(name="soft_delete", field="is_active")] 44 | ``` 45 | 46 | Keep in mind that `Meta` class inheritance follows standard Django convention. If the child model defines a `Meta` class, you will need to inherit the parent's `Meta` class like so: 47 | 48 | ```python 49 | class ChildModel(BaseSoftDelete): 50 | class Meta(BaseSoftDelete.Meta): 51 | ordering = ["is_active"] 52 | ``` 53 | 54 | ## Programmatically registering triggers 55 | 56 | Triggers can be registered programmatically with [pgtrigger.register][]. It can be used as a decorator on a model or called like so: 57 | 58 | ```python 59 | # Register a protection trigger for a model 60 | pgtrigger.register(pgtrigger.Protect(...))(MyModel) 61 | ``` 62 | 63 | !!! warning 64 | 65 | Although triggers can be registered programmatically, we don't recommend doing this except for advanced use cases. Registering a trigger to a model of a third-party app will create migrations in that app. This could result in migrations not being added to your codebase, which can result in triggers not being installed. 66 | 67 | 68 | ## Turning off migration integration 69 | 70 | `django-pgtrigger` patches Django's migration system so that triggers are installed and updated in migrations. If this is undesirable, you can disable the migration integration by setting `settings.PGTRIGGER_MIGRATIONS` to `False`. After this, you are left with two options: 71 | 72 | 1. Manually install triggers with the commands detailed in the next section. 73 | 2. Run trigger installation after every `python manage.py migrate` by setting `settings.PGTRIGGER_INSTALL_ON_MIGRATE` to `True`. Keep in mind that reversing migrations can cause issues when installing triggers this way. 74 | 75 | !!! warning 76 | 77 | There are known issues with installing triggers after migrations that cannot be fixed. For example, reversing migrations can result in trigger installation errors, and race conditions can happen if triggers are installed after the underlying tables have been migrated. 
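If you do go with the second option, the configuration is just two settings. Here is a minimal sketch of what that might look like in your Django settings module, assuming you accept the caveats above:

```python
# settings.py (a minimal sketch; only showing the pgtrigger-related settings)

# Don't add trigger operations to migrations
PGTRIGGER_MIGRATIONS = False

# Instead, install and prune triggers after every "manage.py migrate"
PGTRIGGER_INSTALL_ON_MIGRATE = True
```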
78 | 79 | ## Manual installation, enabling, and disabling 80 | 81 | The following commands allow one to manually manage trigger installation 82 | and are detailed more in the [Commands](commands.md) section: 83 | 84 | !!! danger 85 | 86 | The commands are global operations. Use these commands with extreme caution, especially if the triggers are managed by migrations. If you need to temporarily ignore a trigger inside your application, see the [Ignoring Triggers](ignoring_triggers.md) section. 87 | 88 | 89 | * `python manage.py pgtrigger install`: Install triggers 90 | * `python manage.py pgtrigger uninstall`: Uninstall triggers 91 | * `python manage.py pgtrigger enable`: Enable triggers 92 | * `python manage.py pgtrigger disable`: Disable triggers 93 | * `python manage.py pgtrigger prune`: Uninstall triggers created by `django-pgtrigger` that are no longer in the codebase. 94 | 95 | ## Showing installation status 96 | 97 | Use `python manage.py pgtrigger ls` to see the installation status of individual triggers or all triggers at once. View the [Commands](commands.md) section for descriptions of the different installation states. 98 | -------------------------------------------------------------------------------- /docs/basics.md: -------------------------------------------------------------------------------- 1 | # Basics 2 | 3 | ## The anatomy of a trigger 4 | 5 | Postgres triggers are database functions written in PL/pgSQL that execute based on events 6 | and conditions. 7 | 8 | The [pgtrigger.Trigger][] object is the base class for all triggers in `django-pgtrigger`. Its attributes mirror the syntax required for [making a Postgres trigger](https://www.postgresql.org/docs/current/sql-createtrigger.html). Here are the most common attributes you'll use: 9 | 10 | * **name** 11 | 12 | The identifying name of the trigger. It must be unique for every model and less than 48 characters. 13 | 14 | * **operation** 15 | 16 | The table operation that fires a trigger. Operations are [pgtrigger.Update][], [pgtrigger.Insert][], [pgtrigger.Delete][], [pgtrigger.Truncate][], or [pgtrigger.UpdateOf][]. They can be `OR`ed together (e.g. `pgtrigger.Insert | pgtrigger.Update`) to configure triggers on a combination of operations. 17 | 18 | !!! note 19 | 20 | [pgtrigger.UpdateOf][] fires when columns appear in an `UPDATE` statement. It will not fire if other triggers update the columns. See the notes in the [Postgres docs](https://www.postgresql.org/docs/12/sql-createtrigger.html) for more information. 21 | 22 | * **when** 23 | 24 | When the trigger should run in relation to the operation. [pgtrigger.Before][] executes the trigger before the operation, and vice versa for [pgtrigger.After][]. [pgtrigger.InsteadOf][] is used for SQL views. 25 | 26 | !!! note 27 | 28 | [pgtrigger.Before][] and [pgtrigger.After][] can be used on SQL views under some circumstances. See [the Postgres docs](https://www.postgresql.org/docs/12/sql-createtrigger.html) for a breakdown. 29 | 30 | * **condition** *(optional)* 31 | 32 | Conditionally execute the trigger based on the `OLD` or `NEW` rows. 33 | 34 | Use [pgtrigger.Q][] and [pgtrigger.F][] objects for constructing `WHERE` clauses with the `OLD` and `NEW` rows. See [the conditional triggers section](conditional.md) for more details and other utilities. 35 | 36 | !!! note 37 | 38 | Be sure to familiarize yourself with `OLD` and `NEW` rows when writing conditions by consulting the [Postgres docs](https://www.postgresql.org/docs/current/plpgsql-trigger.html).
For example, `OLD` is always `NULL` in [pgtrigger.Insert][] triggers. 39 | 40 | Here are attributes you'll need when writing more complex triggers. 41 | 42 | * **func** 43 | 44 | The raw PL/pgSQL function that is executed. 45 | 46 | !!! note 47 | 48 | This is *not* the entire declared trigger function, but rather the snippet of PL/pgSQL that is nested in the `DECLARE ... BEGIN ... END` portion of the trigger. 49 | 50 | * **declare** *(optional)* 51 | 52 | Define additional variable declarations as a list of `(variable_name, variable_type)` tuples. For example `declare=[('my_var_1', 'BOOLEAN'), ('my_var_2', 'JSONB')]`. 53 | 54 | * **level** *(optional, default=pgtrigger.Row)* 55 | 56 | Configures the trigger to fire once for every row ([pgtrigger.Row][]) or once for every statement ([pgtrigger.Statement][]). 57 | 58 | * **referencing** *(optional)* 59 | 60 | References the `OLD` and `NEW` rows as transition tables in statement-level triggers. For example, `pgtrigger.Referencing(old='old_table_name', new='new_table_name')` will make an `old_table_name` and `new_table_name` table available as transition tables. See [this StackExchange answer](https://dba.stackexchange.com/a/177468) for additional details, and see the [Cookbook](cookbook.md) for an example. 61 | 62 | * **timing** *(optional)* 63 | 64 | Create a deferrable `CONSTRAINT` trigger when set. Use [pgtrigger.Immediate][] to execute the trigger at the end of a statement and [pgtrigger.Deferred][] to execute it at the end of a transaction. 65 | 66 | !!! note 67 | 68 | Deferrable triggers must have the `level` set to [pgtrigger.Row][] and `when` set to [pgtrigger.After][]. 69 | 70 | ## Defining and installing triggers 71 | 72 | Triggers are defined in the `triggers` attribute of the model `Meta` class. For example, this trigger protects the model from being deleted: 73 | 74 | ```python 75 | from django.db import models 76 | import pgtrigger 77 | 78 | 79 | class CannotDelete(models.Model): 80 | class Meta: 81 | triggers = [ 82 | pgtrigger.Protect(name='protect_deletes', operation=pgtrigger.Delete) 83 | ] 84 | ``` 85 | 86 | Triggers are installed by first running `python manage.py makemigrations` and then `python manage.py migrate`. 87 | 88 | If you'd like to install a trigger on a model of a third-party app, see the [Advanced Installation](advanced_installation.md) section. This section also covers how you can manually install, enable, and disable triggers globally. 89 | 90 | 91 | ## The advantages over signals and model methods 92 | 93 | There are three key advantages to using triggers over implementing the logic in a [Django signal handler](https://docs.djangoproject.com/en/4.1/topics/signals/) or by overriding model methods: 94 | 95 | 1. **Reliability**: Unlike Python code, triggers run alongside queries in the database, ensuring that nothing falls through the cracks. On the other hand, signals and model methods can provide a false sense of security. For example, signals aren't fired for `bulk_create`, and custom model methods aren't called in data migrations by default. Third party apps that bypass the ORM will also not work reliably. 96 | 2. **Complexity**: Complexity can balloon when trying to override models, managers, or querysets to accomplish the same logic a trigger can support. Even simple routines such as conditionally running code based on a changed field are difficult to implement correctly and prone to race conditions. 97 | 3. 
**Performance**: Triggers can perform SQL queries without needing to do expensive round trips to the database to fetch data. This can be a major performance enhancement for routines like history tracking or data denormalization. 98 | -------------------------------------------------------------------------------- /docs/commands.md: -------------------------------------------------------------------------------- 1 | # Commands 2 | 3 | `django-pgtrigger` comes with the `python manage.py pgtrigger` command, which has several subcommands that are described below. 4 | 5 | ## ls 6 | 7 | List all triggers managed by `django-pgtrigger`. 8 | 9 | **Options** 10 | 11 | [uris ...] 12 | Trigger URIs to list. 13 | 14 | -d, --database List triggers on this database. 15 | -s, --schema Use this schema as the search path. Can be provided multiple times. 16 | 17 | **Output** 18 | 19 | The following installation status markers are displayed: 20 | 21 | - `INSTALLED`: The trigger is installed and up to date. 22 | - `OUTDATED`: The trigger is installed, but it has not been migrated to the current version. 23 | - `UNINSTALLED`: The trigger is not installed. 24 | - `PRUNE`: A trigger is no longer in the codebase and still installed. 25 | - `UNALLOWED`: Trigger installation is not allowed for this database. Only applicable in a multi-database environment. 26 | 27 | Note that every installed trigger, including ones that will be pruned, will show whether they are enabled or disabled. Disabled triggers are installed but do not run. 28 | 29 | ## install 30 | 31 | Install triggers. If no arguments are provided, all triggers are installed and orphaned triggers are pruned. 32 | 33 | **Options** 34 | 35 | [uris ...] 36 | Trigger URIs to install. 37 | 38 | -d, --database Install triggers on this database. 39 | -s, --schema Use this schema as the search path. Can be provided multiple times. 40 | 41 | ## uninstall 42 | 43 | Uninstall triggers. If no arguments are provided, all triggers are uninstalled and orphaned triggers will be pruned. 44 | 45 | !!! danger 46 | 47 | Running `uninstall` will globally uninstall triggers. If you need to temporarily ignore a trigger, see the [Ignoring Execution](ignoring_triggers.md) section. 48 | 49 | **Options** 50 | 51 | [uris ...] 52 | Trigger URIs to uninstall. 53 | 54 | -d, --database Uninstall triggers on this database. 55 | -s, --schema Use this schema as the search path. Can be provided multiple times. 56 | 57 | ## enable 58 | 59 | Enable triggers. 60 | 61 | **Options** 62 | 63 | [uris ...] 64 | Trigger URIs to enable. 65 | 66 | -d, --database Enable triggers on this database. 67 | -s, --schema Use this schema as the search path. Can be provided multiple times. 68 | 69 | 70 | ## disable 71 | 72 | Disable triggers. 73 | 74 | !!! danger 75 | 76 | Running `disable` will globally disable the execution of triggers. If you need to temporarily ignore a trigger, see the [Ignoring Execution](ignoring_triggers.md) section. 77 | 78 | **Options** 79 | 80 | [uris ...] 81 | Trigger URIs to disable. 82 | 83 | -d, --database Disable triggers on this database. 84 | -s, --schema Use this schema as the search path. Can be provided multiple times. 85 | 86 | ## prune 87 | 88 | Uninstall any triggers managed by `django-pgtrigger` that are no longer in the codebase. 89 | 90 | !!! note 91 | 92 | Pruning happens automatically when doing `python manage.py pgtrigger install` or `python manage.py pgtrigger uninstall`. 93 | 94 | **Options** 95 | 96 | -d, --database Prune triggers on this database.
97 | -s, --schema Use this schema as the search path. Can be provided multiple times. 98 | -------------------------------------------------------------------------------- /docs/conditional.md: -------------------------------------------------------------------------------- 1 | # Conditional Triggers 2 | 3 | Here's a brief guide on the many ways one can create conditional row-level triggers using `django-pgtrigger`. We start with the high-level utilities and make our way towards lower-level ones. 4 | 5 | Remember, row-level triggers have access to either the `NEW` row being inserted or updated, or the `OLD` row being updated or deleted. These variables are copies of the row and can be used in the conditions of the trigger. Update triggers, for example, can conditionally execute based on both the values of the row before the update (the `OLD` row) and the row after the modification (the `NEW` row). 6 | 7 | !!! note 8 | 9 | Consult the [Postgres docs](https://www.postgresql.org/docs/current/plpgsql-trigger.html) for more information on these variables. 10 | 11 | We'll first dive into update-based triggers and the utilities `django-pgtrigger` provides for detecting changes on models. 12 | 13 | ## Field Change Conditions 14 | 15 | The following conditions are provided out of the box for conveniently expressing field changes: 16 | 17 | - [pgtrigger.AnyChange][]: If any supplied fields change, trigger the condition. 18 | - [pgtrigger.AnyDontChange][]: If any supplied fields don't change, trigger the condition. 19 | - [pgtrigger.AllChange][]: If all supplied fields change, trigger the condition. 20 | - [pgtrigger.AllDontChange][]: If all supplied fields don't change, trigger the condition. 21 | 22 | For example, let's use this model: 23 | 24 | ```python 25 | class MyModel(models.Model): 26 | int_field = models.IntegerField() 27 | char_field = models.CharField(null=True) 28 | dt_field = models.DateTimeField(auto_now=True) 29 | ``` 30 | 31 | The following trigger will raise an exception if an update happens that doesn't change any fields. 32 | 33 | ```python 34 | pgtrigger.Protect(operation=pgtrigger.Update, condition=~pgtrigger.AnyChange()) 35 | ``` 36 | 37 | This is also equivalent to doing: 38 | 39 | ```python 40 | pgtrigger.Protect(operation=pgtrigger.Update, condition=pgtrigger.AllDontChange()) 41 | ``` 42 | 43 | !!! remember 44 | 45 | If no arguments are provided to any of these utilities, they operate over all fields on the model. 46 | 47 | Let's say we want to block updates if any changes happen to the int or char fields: 48 | 49 | ```python 50 | pgtrigger.Protect( 51 | operation=pgtrigger.Update, 52 | condition=pgtrigger.AnyChange("int_field", "char_field") 53 | ) 54 | ``` 55 | 56 | This is how the [pgtrigger.ReadOnly][] trigger is implemented. Underneath the hood, the condition looks like this: 57 | 58 | ```sql 59 | OLD.int_field IS DISTINCT FROM NEW.int_field 60 | OR OLD.char_field IS DISTINCT FROM NEW.char_field 61 | ``` 62 | 63 | !!! note 64 | 65 | `IS DISTINCT FROM` helps ensure that nullable objects are correctly compared since null never equals null. 66 | 67 | One can also exclude fields in the condition.
For example, this condition fires only if every field but the excluded ones change: 68 | 69 | ```python 70 | pgtrigger.AllChange(exclude=["dt_field"]) 71 | ``` 72 | 73 | To automatically ignore `auto_now` and `auto_now_add` datetime fields, do: 74 | 75 | ```python 76 | # Fires on changes to any fields except auto_now and auto_now_add fields 77 | pgtrigger.AnyChange(exclude_auto=True) 78 | ``` 79 | 80 | !!! remember 81 | 82 | Included and excluded fields can both be supplied. Included fields are used as the initial fields before `exclude` and `exclude_auto` remove fields. 83 | 84 | ## Targeting old and new fields with `pgtrigger.Q` and `pgtrigger.F` 85 | 86 | We previously covered various change condition utilities. These only operate over update-based triggers. One can create fine-grained trigger conditions for all operations by using [pgtrigger.Q][] and [pgtrigger.F][] constructs. 87 | 88 | For example, let's use our model from above again: 89 | 90 | ```python 91 | class MyModel(models.Model): 92 | int_field = models.IntegerField() 93 | char_field = models.CharField(null=True) 94 | dt_field = models.DateTimeField(auto_now=True) 95 | ``` 96 | 97 | The following condition will fire whenever the old row has an `int_field` greater than zero: 98 | 99 | ```python 100 | pgtrigger.Q(old__int_field__gt=0) 101 | ``` 102 | 103 | Similar to Django's syntax, the [pgtrigger.Q][] object can reference the `old__` and `new__` row. The [pgtrigger.F][] object can also be used for doing comparisons. For example, here we only fire when the `int_field` of the old row is greater than the int field of the new row. 104 | 105 | ```python 106 | pgtrigger.Q(old__int_field__gt=pgtrigger.F("new__int_field")) 107 | ``` 108 | 109 | Remember to use the `__df` operator for `DISTINCT FROM` and `__ndf` for `NOT DISTINCT FROM`. This is generally the behavior one desires when checking for changes of nullable fields. For example, this condition fires only when `char_field` is not distinct from its old version. 110 | 111 | ```python 112 | pgtrigger.Q(old__char_field__ndf=pgtrigger.F("new__char_field")) 113 | ``` 114 | 115 | !!! note 116 | 117 | The above is equivalent to doing `pgtrigger.AnyDontChange("char_field")` 118 | 119 | Finally, [pgtrigger.Q][] objects can be negated, and-ed, and or-ed just like Django `Q` objects: 120 | 121 | ```python 122 | pgtrigger.Q(old__char_field__ndf=pgtrigger.F("new__char_field")) 123 | | pgtrigger.Q(new__int_field=0) 124 | ``` 125 | 126 | ## Raw SQL conditions 127 | 128 | The utilities above should handle the majority of use cases when expressing conditions; however, users can still express raw SQL with [pgtrigger.Condition][]. For example, here's a condition that fires if any field changes: 129 | 130 | ```python 131 | pgtrigger.Condition("OLD.* IS DISTINCT FROM NEW.*") 132 | ``` 133 | 134 | !!! note 135 | 136 | The above is equivalent to `pgtrigger.AnyChange()`. 137 | 138 | ## Conditions across multiple models 139 | 140 | Remember, trigger conditions can only be expressed based on the rows of the current model. One can't, for example, reference a joined foreign key's value. This isn't a limitation in `django-pgtrigger` but rather a limitation in Postgres. 141 | 142 | Custom conditional logic that spans multiple tables must happen inside the function as an `if/else` type of statement. [See this resource](https://www.postgresqltutorial.com/postgresql-plpgsql/plpgsql-if-else-statements/) for an example of what this looks like.
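For a rough idea of how this pattern translates to `django-pgtrigger`, here is a minimal sketch using hypothetical `Account` and `Transaction` models. The cross-table check lives in the trigger's `func` rather than in its `condition`:

```python
import pgtrigger
from django.db import models


class Account(models.Model):
    is_active = models.BooleanField(default=True)


class Transaction(models.Model):
    account = models.ForeignKey(Account, on_delete=models.CASCADE)

    class Meta:
        triggers = [
            pgtrigger.Trigger(
                name="block_transactions_on_inactive_accounts",
                when=pgtrigger.Before,
                operation=pgtrigger.Insert,
                # The check against the other table happens in the function body
                func=f"""
                    IF NOT EXISTS (
                        SELECT 1 FROM {Account._meta.db_table}
                        WHERE id = NEW.account_id AND is_active
                    ) THEN
                        RAISE EXCEPTION 'Account % is inactive', NEW.account_id;
                    END IF;
                    RETURN NEW;
                """,
            )
        ]
```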
143 | 144 | ## Statement-Level Triggers 145 | 146 | Postgres only natively supports writing conditions on row-level triggers. `django-pgtrigger`, however, provides a [pgtrigger.Composer][] trigger that can aid in writing conditional statement-level triggers. 147 | 148 | See [the examples in the statement-level trigger section for more information](./statement.md). -------------------------------------------------------------------------------- /docs/contributing.md: -------------------------------------------------------------------------------- 1 | --8<-- "CONTRIBUTING.md" -------------------------------------------------------------------------------- /docs/css/mkdocs-material.css: -------------------------------------------------------------------------------- 1 | .md-typeset__table { 2 | min-width: 100%; 3 | } 4 | 5 | .md-typeset table:not([class]) { 6 | display: table; 7 | } 8 | 9 | :root { 10 | --md-primary-fg-color: #1d1f29; 11 | --md-primary-fg-color--light: #1d1f29; 12 | --md-primary-fg-color--dark: #1d1f29; 13 | } 14 | 15 | .md-content { 16 | --md-typeset-a-color: #00bc70; 17 | } 18 | 19 | .md-footer { 20 | background-color: #1d1f29; 21 | } 22 | .md-footer-meta { 23 | background-color: #1d1f29; 24 | } 25 | 26 | readthedocs-flyout { 27 | display: none; 28 | } -------------------------------------------------------------------------------- /docs/css/mkdocstrings.css: -------------------------------------------------------------------------------- 1 | div.doc-contents:not(.first) { 2 | padding-left: 25px; 3 | border-left: .05rem solid var(--md-typeset-table-color); 4 | } -------------------------------------------------------------------------------- /docs/deferrable.md: -------------------------------------------------------------------------------- 1 | # Deferrable Triggers 2 | 3 | Triggers are "deferrable" if their execution can be postponed until the end of the transaction. This behavior can be desirable for certain situations. 4 | 5 | For example, here we ensure a `Profile` model always exists for every `User`: 6 | 7 | ```python 8 | class Profile(models.Model): 9 | user = models.OneToOneField(User, on_delete=models.CASCADE) 10 | 11 | class UserProxy(User): 12 | class Meta: 13 | proxy = True 14 | triggers = [ 15 | pgtrigger.Trigger( 16 | name="profile_for_every_user", 17 | when=pgtrigger.After, 18 | operation=pgtrigger.Insert, 19 | timing=pgtrigger.Deferred, 20 | func=f""" 21 | IF NOT EXISTS (SELECT FROM {Profile._meta.db_table} WHERE user_id = NEW.id) THEN 22 | RAISE EXCEPTION 'Profile does not exist for user %', NEW.id; 23 | END IF; 24 | RETURN NULL; 25 | """ 26 | ) 27 | ] 28 | ``` 29 | 30 | This trigger ensures that any creation of a `User` will fail if a `Profile` does not exist. Note that we must create them both in a transaction: 31 | 32 | ```python 33 | # This will succeed since the user has a profile when 34 | # the transaction completes 35 | with transaction.atomic(): 36 | user = User.objects.create() 37 | Profile.objects.create(user=user) 38 | 39 | # This will fail since it is not in a transaction 40 | user = User.objects.create() 41 | Profile.objects.create(user=user) 42 | ``` 43 | 44 | ## Ignoring deferrable triggers 45 | 46 | Deferrable triggers can be ignored, but remember that they execute at the very end of a transaction. If [pgtrigger.ignore][] does not wrap the transaction, the deferrable trigger will not be ignored. 
47 | 48 | Here is a correct way of ignoring the deferrable trigger from the initial example: 49 | 50 | ```python 51 | with pgtrigger.ignore("my_app.UserProxy:profile_for_every_user"): 52 | # Use durable=True, otherwise we may be wrapped in a parent 53 | # transaction 54 | with transaction.atomic(durable=True): 55 | # We no longer need a profile for a user... 56 | User.objects.create(...) 57 | ``` 58 | 59 | Here's an example of code that will fail: 60 | 61 | ```python 62 | with transaction.atomic(): 63 | # This ignore does nothing for this trigger. `pgtrigger.ignore` 64 | # will no longer be in effect by the time the trigger runs at the 65 | # end of the transaction. 66 | with pgtrigger.ignore("my_app.UserProxy:profile_for_every_user"): 67 | # The trigger will raise an exception 68 | User.objects.create(...) 69 | ``` 70 | 71 | ## Adjusting runtime behavior 72 | 73 | When a deferrable trigger is declared, the `timing` attribute can be adjusted at runtime using [pgtrigger.constraints][]. This function mimics Postgres's `SET CONSTRAINTS` statement. Check [the Postgres docs for more info](https://www.postgresql.org/docs/current/sql-set-constraints.html). 74 | 75 | [pgtrigger.constraints][] takes the new timing value and (optionally) a list of trigger URIs over which to apply the value. The value is in effect until the end of the transaction. 76 | 77 | Let's take our original example. We can set the trigger to immediately run, causing it to throw an error: 78 | 79 | ```python 80 | with transaction.atomic(): 81 | user = User.objects.create(...) 82 | 83 | # Make the deferrable trigger fire immediately. This will cause an exception 84 | # because a profile has not yet been created for the user 85 | pgtrigger.constraints(pgtrigger.Immediate, "auth.User:profile_for_every_user") 86 | ``` 87 | 88 | Keep in mind that the constraint settings stay in effect until the end of the transaction. If a parent transaction wraps our code, timing overrides will persist. 89 | 90 | !!! tip 91 | 92 | You can do the opposite of our example, creating triggers with `timing=pgtrigger.Immediate` and deferring their execution dynamically. 93 | 94 | !!! note 95 | 96 | In a multi-schema setup, only triggers in the schema search path will be overridden with [pgtrigger.constraints][]. 97 | -------------------------------------------------------------------------------- /docs/faq.md: -------------------------------------------------------------------------------- 1 | # Frequently Asked Questions 2 | 3 | ## Triggers are scary, don't you think? 4 | 5 | Enforcing data integrity in application code is what you should be afraid of. Triggers, like uniqueness constraints, are a blessing that help solidify your data modeling. 6 | 7 | The best way to ensure triggers are behaving correctly is to: 8 | 9 | 1. Write tests for them. 10 | 2. Run `python manage.py pgtrigger ls` to verify they are installed in production. 11 | 12 | ## Why not implement logic with signals or model methods? 13 | 14 | See [this section from the docs](basics.md#advantages_of_triggers) 15 | 16 | ## Why can't I join foreign keys in conditions? 17 | 18 | Postgres only allows columns of the changed rows to be used in trigger conditions, and data cannot be joined. That's why, for example, one cannot write a condition like `Q(old__foreign_key__field="value")`. 19 | 20 | Conditional logic like this must be performed in the trigger function itself by manually joining data. 21 | 22 | ## How do I test triggers? 
23 | 24 | Manipulate your models in your test suite and verify the expected result happens. 25 | 26 | If you've turned off migrations for your test suite, call [pgtrigger.install][] after the database is set up or set `settings.PGTRIGGER_INSTALL_ON_MIGRATE` to `True` to ensure triggers are installed for your tests. 27 | 28 | !!! warning 29 | 30 | Be sure the `settings.PGTRIGGER_INSTALL_ON_MIGRATE` setting is isolated to your test suite, otherwise it could cause unexpected problems in production when reversing migrations. 31 | 32 | When a failure-based trigger like [pgtrigger.Protect][] fails, a `django.db.utils.InternalError` is raised and can be caught in your test function. Keep in mind that this will place the current transaction in an errored state. If you'd like to test triggers like this without needing to use a transaction test case, wrap the assertion in `transaction.atomic`. This is illustrated below with pseudocode using [pytest-django](https://pytest-django.readthedocs.io/en/latest/). 33 | 34 | ```python 35 | from django.db import transaction 36 | from django.db.utils import InternalError 37 | import pytest 38 | 39 | @pytest.mark.django_db 40 | def test_protection_trigger(): 41 | with pytest.raises(InternalError, match="Cannot delete"), transaction.atomic(): 42 | # Try to delete protected model 43 | 44 | # Since the above assertion is wrapped in transaction.atomic, we will still 45 | # have a valid transaction in our test case here 46 | ``` 47 | 48 | ## How do I disable triggers? 49 | 50 | Use [pgtrigger.ignore][] if you need to temporarily ignore triggers in your application (see [Ignoring Execution](ignoring_triggers.md)). Only use the core installation commands if you want to disable or uninstall triggers globally across the **entire application** (see the [Commands](commands.md) section). 51 | 52 | ## How can I reference the table name in a custom function? 53 | 54 | When writing a trigger in `Meta`, it's not possible to access other model meta properties like `db_table`. Use [pgtrigger.Func][] to get around this. See [this example from the cookbook](cookbook.md#func_model_properties). 55 | 56 | ## How can I report issues or request features? 57 | 58 | Open a [discussion](https://github.com/Opus10/django-pgtrigger/discussions) for a feature request. You're welcome to pair this with a pull request, but it's best to open a discussion first if the feature request is not trivial. 59 | 60 | For bugs, open an [issue](https://github.com/Opus10/django-pgtrigger/issues). 61 | 62 | 63 | -------------------------------------------------------------------------------- /docs/further_reading.md: -------------------------------------------------------------------------------- 1 | # Talks and Tutorials 2 | 3 | Check out [this tutorial](https://wesleykendall.github.io/django-pgtrigger-tutorial/) with interactive examples from a Django meetup talk. 4 | 5 | The [DjangoCon 2021 talk](https://www.youtube.com/watch?v=Tte3d4JjxCk/) also breaks down triggers and shows several examples. -------------------------------------------------------------------------------- /docs/ignoring_triggers.md: -------------------------------------------------------------------------------- 1 | # Ignoring Execution 2 | 3 | ## Overview 4 | 5 | [pgtrigger.ignore][] is a decorator and context manager that temporarily ignores triggers for a single 6 | thread of execution.
Here we ignore deletion protection: 7 | 8 | ```python 9 | class CannotDelete(models.Model): 10 | class Meta: 11 | triggers = [ 12 | pgtrigger.Protect(name="protect_deletes", operation=pgtrigger.Delete) 13 | ] 14 | 15 | 16 | # Bypass deletion protection 17 | with pgtrigger.ignore("my_app.CannotDelete:protect_deletes"): 18 | CannotDelete.objects.all().delete() 19 | ``` 20 | 21 | As shown above, [pgtrigger.ignore][] takes a trigger URI that is formatted as `{app_label}.{model_name}:{trigger_name}`. Multiple trigger URIs can be given to [pgtrigger.ignore][], and [pgtrigger.ignore][] can be nested. If no trigger URIs are provided to [pgtrigger.ignore][], all triggers are ignored. 22 | 23 | !!! tip 24 | 25 | See all trigger URIs with `python manage.py pgtrigger ls` 26 | 27 | By default, [pgtrigger.ignore][] configures ignoring triggers on every Postgres database. This can be changed with the `databases` argument. 28 | 29 | !!! important 30 | 31 | Remember, [pgtrigger.ignore][] ignores the execution of a trigger on a per-thread basis. This is very different from disabling a trigger or uninstalling a trigger globally. See the [Advanced Installation](advanced_installation.md) section for more details on managing the installation of triggers. 32 | 33 | ## Transaction notes 34 | 35 | [pgtrigger.ignore][] flushes a temporary Postgres variable at the end of the context manager if running in a transaction. This could cause issues for transactions that are in an errored state. 36 | 37 | Here's an example of when this case happens: 38 | 39 | ```python 40 | with transaction.atomic(): 41 | with pgtrigger.ignore("app.Model:protect_inserts"): 42 | try: 43 | # Create an object that raises an integrity error 44 | app.Model.objects.create(unique_key="duplicate") 45 | except IntegrityError: 46 | # Ignore the integrity error 47 | pass 48 | 49 | # When we exit the context manager here, it will try to flush 50 | # a local Postgres variable. This causes an error because the transaction 51 | # is in an errored state. 52 | ``` 53 | 54 | If you're ignoring triggers and handling database errors, there are two ways to prevent this error from happening: 55 | 56 | 1. Wrap the outer transaction in `with pgtrigger.ignore.session():` so that the session is completed outside the transaction. 57 | 2. Wrap the inner `try/except` in `with transaction.atomic():` so that the errored part of the transaction is rolled back before the [pgtrigger.ignore][] context manager ends. 58 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # django-pgtrigger 2 | 3 | `django-pgtrigger` helps you write [Postgres triggers](https://www.postgresql.org/docs/current/sql-createtrigger.html) for your Django models. 4 | 5 | ## Why should I use triggers? 6 | 7 | Triggers can solve a variety of complex problems more reliably, performantly, and succinctly than application code. For example, 8 | 9 | * Protecting operations on rows or columns ([pgtrigger.Protect][]). 10 | * Making read-only models or fields ([pgtrigger.ReadOnly][]). 11 | * Soft-deleting models ([pgtrigger.SoftDelete][]). 12 | * Snapshotting and tracking model changes ([django-pghistory](https://django-pghistory.readthedocs.io)). 13 | * Enforcing field transitions ([pgtrigger.FSM][]). 14 | * Keeping a search vector updated for full-text search ([pgtrigger.UpdateSearchVector][]). 15 | * Building official interfaces (e.g.
enforcing use of `User.objects.create_user` and not `User.objects.create`). 16 | * Versioning models, mirroring fields, computing unique model hashes, and the list goes on... 17 | 18 | All of these examples require no overridden methods, no base models, and no signal handling. 19 | 20 | ## Quick start 21 | 22 | Install `django-pgtrigger` with `pip3 install django-pgtrigger` and add `pgtrigger` to `settings.INSTALLED_APPS`. 23 | 24 | [pgtrigger.Trigger][] objects are added to `triggers` in model `Meta`. `django-pgtrigger` comes with several trigger classes, such as [pgtrigger.Protect][]. In the following, we're protecting the model from being deleted: 26 | 26 | ```python 27 | class ProtectedModel(models.Model): 28 | """This model cannot be deleted!""" 29 | 30 | class Meta: 31 | triggers = [ 32 | pgtrigger.Protect(name='protect_deletes', operation=pgtrigger.Delete) 33 | ] 34 | ``` 35 | 36 | When migrations are created and executed, `ProtectedModel` will raise an exception anytime a deletion is attempted. 37 | 38 | Let's extend this example further and only protect deletions on active objects. In this example, the trigger conditionally runs when the row being deleted (the `OLD` row in trigger terminology) is still active: 39 | 40 | ```python 41 | class ProtectedModel(models.Model): 42 | """Active object cannot be deleted!""" 43 | is_active = models.BooleanField(default=True) 44 | 45 | class Meta: 46 | triggers = [ 47 | pgtrigger.Protect( 48 | name='protect_deletes', 49 | operation=pgtrigger.Delete, 50 | condition=pgtrigger.Q(old__is_active=True) 51 | ) 52 | ] 53 | ``` 54 | 55 | `django-pgtrigger` uses [pgtrigger.Q][] and [pgtrigger.F][] objects to conditionally execute triggers based on the `OLD` and `NEW` rows. Combining these Django idioms with [pgtrigger.Trigger][] objects can solve a wide variety of problems without ever writing SQL. Users, however, can still use raw SQL for complex cases. 56 | 57 | Triggers are installed like other database objects. Run `python manage.py makemigrations` and `python manage.py migrate` to install triggers. 58 | 59 | ## Compatibility 60 | 61 | `django-pgtrigger` is compatible with Python 3.9 - 3.13, Django 4.2 - 5.2, Psycopg 2 - 3, and Postgres 14 - 17. 62 | 63 | ## Next steps 64 | 65 | We recommend everyone first read: 66 | 67 | * [Installation](installation.md) for how to install the library. 68 | * [Basics](basics.md) for an overview and motivation. 69 | 70 | After this, there are several usage guides: 71 | 72 | * [Cookbook](cookbook.md) for trigger examples. 73 | * [Conditional Triggers](conditional.md) for all the ways one can create conditional triggers. 74 | * [Ignoring Execution](ignoring_triggers.md) for dynamically ignoring triggers. 75 | * [Deferrable Triggers](deferrable.md) for deferring trigger execution. 76 | * [Advanced Installation](advanced_installation.md) for installing triggers on third-party models, many-to-many models, programmatic installation, and more. 77 | * [Advanced Database Setups](advanced_db.md) for notes on how triggers work in multi-database, multi-schema, or partitioned database setups. 78 | 79 | There's additional help in these sections: 80 | 81 | * [FAQ](faq.md) for common questions like testing and disabling triggers. 82 | * [Troubleshooting](troubleshooting.md) for advice on known issues. 83 | * [Upgrading](upgrading.md) for upgrading to new major versions. 84 | * [Further Reading](further_reading.md) for additional reading and videos.
85 | 86 | Finally, core API information exists in these sections: 87 | 88 | * [Settings](settings.md) for all available Django settings. 89 | * [Commands](commands.md) for using the `python manage.py pgtrigger` management commands. 90 | * [Module](module.md) for documentation of the `pgtrigger` module. 91 | * [Release Notes](release_notes.md) for information about every release. 92 | * [Contributing Guide](contributing.md) for details on contributing to the codebase. 93 | -------------------------------------------------------------------------------- /docs/installation.md: -------------------------------------------------------------------------------- 1 | # Installation 2 | 3 | Install `django-pgtrigger` with: 4 | 5 | pip3 install django-pgtrigger 6 | 7 | After this, add `pgtrigger` to the `INSTALLED_APPS` setting of your Django project. -------------------------------------------------------------------------------- /docs/module.md: -------------------------------------------------------------------------------- 1 | # Module 2 | 3 | Below are the core classes and functions of the `pgtrigger` module. 4 | 5 | ## Level clause 6 | 7 | :::pgtrigger.Row 8 | :::pgtrigger.Statement 9 | 10 | ## When clause 11 | 12 | :::pgtrigger.After 13 | :::pgtrigger.Before 14 | :::pgtrigger.InsteadOf 15 | 16 | ## Operation clause 17 | 18 | :::pgtrigger.Insert 19 | :::pgtrigger.Update 20 | :::pgtrigger.Delete 21 | :::pgtrigger.Truncate 22 | :::pgtrigger.UpdateOf 23 | 24 | ## Referencing clause 25 | 26 | :::pgtrigger.Referencing 27 | 28 | ## Timing clause 29 | 30 | :::pgtrigger.Immediate 31 | :::pgtrigger.Deferred 32 | 33 | ## Func clause 34 | 35 | :::pgtrigger.Func 36 | 37 | ## Conditions 38 | 39 | :::pgtrigger.Condition 40 | :::pgtrigger.AnyChange 41 | :::pgtrigger.AnyDontChange 42 | :::pgtrigger.AllChange 43 | :::pgtrigger.AllDontChange 44 | :::pgtrigger.Q 45 | :::pgtrigger.F 46 | :::pgtrigger.IsDistinctFrom 47 | :::pgtrigger.IsNotDistinctFrom 48 | 49 | ## Triggers 50 | 51 | :::pgtrigger.Trigger 52 | :::pgtrigger.Protect 53 | :::pgtrigger.ReadOnly 54 | :::pgtrigger.SoftDelete 55 | :::pgtrigger.FSM 56 | :::pgtrigger.UpdateSearchVector 57 | :::pgtrigger.Composer 58 | 59 | ## Runtime execution 60 | 61 | :::pgtrigger.constraints 62 | :::pgtrigger.ignore 63 | :::pgtrigger.schema 64 | 65 | ## Registry 66 | 67 | :::pgtrigger.register 68 | :::pgtrigger.registered 69 | 70 | ## Installation 71 | 72 | :::pgtrigger.install 73 | :::pgtrigger.uninstall 74 | :::pgtrigger.enable 75 | :::pgtrigger.disable 76 | :::pgtrigger.prunable 77 | :::pgtrigger.prune 78 | -------------------------------------------------------------------------------- /docs/overrides/partials/copyright.html: -------------------------------------------------------------------------------- 1 | 17 | -------------------------------------------------------------------------------- /docs/release_notes.md: -------------------------------------------------------------------------------- 1 | # Release Notes 2 | 3 | --8<-- "CHANGELOG.md:2" 4 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | ansimarkup==2.0.0 ; python_full_version >= "3.9.0" and python_version < "4" 2 | arrow==1.3.0 ; python_full_version >= "3.9.0" and python_version < "4" 3 | asgiref==3.7.2 ; python_full_version >= "3.9.0" and python_version < "4" 4 | babel==2.13.0 ; python_full_version >= "3.9.0" and python_version < "4" 5 | backrefs==5.8 ; python_version 
>= "3.9" and python_version < "4" 6 | binaryornot==0.4.4 ; python_full_version >= "3.9.0" and python_version < "4" 7 | black==25.1.0 ; python_version >= "3.9" and python_version < "4" 8 | build==1.2.2.post1 ; python_full_version >= "3.9.0" and python_version < "4.0" 9 | cachecontrol[filecache]==0.14.0 ; python_full_version >= "3.9.0" and python_version < "4.0" 10 | cachetools==5.5.2 ; python_full_version >= "3.9.0" and python_version < "4" 11 | certifi==2023.7.22 ; python_full_version >= "3.9.0" and python_version < "4" 12 | cffi==1.17.1 ; python_full_version >= "3.9.0" and python_version < "4.0" and (sys_platform == "darwin" or sys_platform == "linux") and (sys_platform == "darwin" or platform_python_implementation != "PyPy") 13 | chardet==5.2.0 ; python_full_version >= "3.9.0" and python_version < "4" 14 | charset-normalizer==3.3.0 ; python_full_version >= "3.9.0" and python_version < "4" 15 | cleo==2.1.0 ; python_full_version >= "3.9.0" and python_version < "4.0" 16 | click==8.1.7 ; python_version >= "3.9" and python_version < "4" 17 | colorama==0.4.6 ; python_version >= "3.9" and python_version < "4" 18 | cookiecutter==1.7.3 ; python_full_version >= "3.9.0" and python_version < "4" 19 | coverage[toml]==7.8.0 ; python_version >= "3.9" and python_version < "4" 20 | crashtest==0.4.1 ; python_full_version >= "3.9.0" and python_version < "4.0" 21 | cryptography==43.0.3 ; python_full_version >= "3.9.0" and python_version < "4.0" and sys_platform == "linux" 22 | distlib==0.3.7 ; python_full_version >= "3.9.0" and python_version < "4" 23 | dj-database-url==2.3.0 ; python_full_version >= "3.9.0" and python_version < "4" 24 | django-dynamic-fixture==4.0.1 ; python_full_version >= "3.9.0" and python_version < "4" 25 | django-pgbulk==3.2.2 ; python_full_version >= "3.9.0" and python_version < "4" 26 | django-postgres-extra==2.0.4 ; python_full_version >= "3.9.0" and python_version < "4" 27 | django-stubs-ext==5.1.3 ; python_full_version >= "3.9.0" and python_version < "4" 28 | django-stubs==5.1.3 ; python_full_version >= "3.9.0" and python_version < "4" 29 | django==4.2.6 ; python_full_version >= "3.9.0" and python_version < "4" 30 | dulwich==0.21.7 ; python_full_version >= "3.9.0" and python_version < "4.0" 31 | exceptiongroup==1.1.3 ; python_version >= "3.9" and python_version < "3.11" 32 | fastjsonschema==2.20.0 ; python_full_version >= "3.9.0" and python_version < "4.0" 33 | filelock==3.16.1 ; python_full_version >= "3.9.0" and python_version < "4" 34 | footing==0.1.4 ; python_full_version >= "3.9.0" and python_version < "4" 35 | ghp-import==2.1.0 ; python_version >= "3.9" and python_version < "4" 36 | griffe==1.7.2 ; python_version >= "3.9" and python_version < "4" 37 | idna==3.4 ; python_full_version >= "3.9.0" and python_version < "4" 38 | importlib-metadata==6.8.0 ; python_version >= "3.9" and python_version < "3.12" 39 | iniconfig==2.0.0 ; python_version >= "3.9" and python_version < "4" 40 | installer==0.7.0 ; python_full_version >= "3.9.0" and python_version < "4.0" 41 | jaraco-classes==3.4.0 ; python_full_version >= "3.9.0" and python_version < "4.0" 42 | jeepney==0.8.0 ; python_full_version >= "3.9.0" and python_version < "4.0" and sys_platform == "linux" 43 | jinja2-time==0.2.0 ; python_full_version >= "3.9.0" and python_version < "4" 44 | jinja2==3.1.2 ; python_version >= "3.9" and python_version < "4" 45 | keyring==24.3.1 ; python_full_version >= "3.9.0" and python_version < "4.0" 46 | markdown==3.7 ; python_version >= "3.9" and python_version < "4" 47 | markupsafe==2.1.3 ; 
python_version >= "3.9" and python_version < "4" 48 | mergedeep==1.3.4 ; python_version >= "3.9" and python_version < "4" 49 | mkdocs-autorefs==1.4.1 ; python_version >= "3.9" and python_version < "4" 50 | mkdocs-get-deps==0.2.0 ; python_version >= "3.9" and python_version < "4" 51 | mkdocs-material-extensions==1.3.1 ; python_full_version >= "3.9.0" and python_version < "4" 52 | mkdocs-material==9.6.12 ; python_full_version >= "3.9.0" and python_version < "4" 53 | mkdocs==1.6.1 ; python_version >= "3.9" and python_version < "4" 54 | mkdocstrings-python==1.16.10 ; python_version >= "3.9" and python_version < "4" 55 | mkdocstrings==0.29.1 ; python_version >= "3.9" and python_version < "4" 56 | more-itertools==10.5.0 ; python_full_version >= "3.9.0" and python_version < "4.0" 57 | msgpack==1.1.0 ; python_full_version >= "3.9.0" and python_version < "4.0" 58 | mypy-extensions==1.0.0 ; python_version >= "3.9" and python_version < "4" 59 | nodeenv==1.8.0 ; python_full_version >= "3.9.0" and python_version < "4" 60 | packaging==24.2 ; python_version >= "3.9" and python_version < "4" 61 | paginate==0.5.6 ; python_full_version >= "3.9.0" and python_version < "4" 62 | pathspec==0.11.2 ; python_version >= "3.9" and python_version < "4" 63 | pexpect==4.9.0 ; python_full_version >= "3.9.0" and python_version < "4.0" 64 | pkginfo==1.11.2 ; python_full_version >= "3.9.0" and python_version < "4.0" 65 | platformdirs==4.3.6 ; python_version >= "3.9" and python_version < "4" 66 | pluggy==1.5.0 ; python_version >= "3.9" and python_version < "4" 67 | poetry-core==1.9.1 ; python_full_version >= "3.9.0" and python_version < "4.0" 68 | poetry-plugin-export==1.8.0 ; python_full_version >= "3.9.0" and python_version < "4.0" 69 | poetry==1.8.4 ; python_full_version >= "3.9.0" and python_version < "4.0" 70 | poyo==0.5.0 ; python_full_version >= "3.9.0" and python_version < "4" 71 | psycopg2-binary==2.9.10 ; python_full_version >= "3.9.0" and python_version < "4" 72 | ptyprocess==0.7.0 ; python_full_version >= "3.9.0" and python_version < "4.0" 73 | pycparser==2.22 ; python_full_version >= "3.9.0" and python_version < "4.0" and (sys_platform == "darwin" or sys_platform == "linux") and (sys_platform == "darwin" or platform_python_implementation != "PyPy") 74 | pygments==2.16.1 ; python_full_version >= "3.9.0" and python_version < "4" 75 | pymdown-extensions==10.3 ; python_version >= "3.9" and python_version < "4" 76 | pyproject-api==1.8.0 ; python_full_version >= "3.9.0" and python_version < "4" 77 | pyproject-hooks==1.2.0 ; python_full_version >= "3.9.0" and python_version < "4.0" 78 | pyright==1.1.399 ; python_full_version >= "3.9.0" and python_version < "4" 79 | pytest-cov==6.1.1 ; python_version >= "3.9" and python_version < "4" 80 | pytest-django==4.11.1 ; python_full_version >= "3.9.0" and python_version < "4" 81 | pytest-dotenv==0.5.2 ; python_full_version >= "3.9.0" and python_version < "4" 82 | pytest-mock==3.14.0 ; python_full_version >= "3.9.0" and python_version < "4" 83 | pytest-order==1.1.0 ; python_full_version >= "3.9.0" and python_version < "4" 84 | pytest==8.3.5 ; python_version >= "3.9" and python_version < "4" 85 | python-dateutil==2.8.2 ; python_version >= "3.9" and python_version < "4" 86 | python-dotenv==1.0.0 ; python_full_version >= "3.9.0" and python_version < "4" 87 | python-gitlab==3.15.0 ; python_full_version >= "3.9.0" and python_version < "4" 88 | python-slugify==8.0.1 ; python_full_version >= "3.9.0" and python_version < "4" 89 | pywin32-ctypes==0.2.3 ; python_full_version >= "3.9.0" 
and python_version < "4.0" and sys_platform == "win32" 90 | pyyaml-env-tag==0.1 ; python_version >= "3.9" and python_version < "4" 91 | pyyaml==6.0.1 ; python_version >= "3.9" and python_version < "4" 92 | rapidfuzz==3.10.1 ; python_version >= "3.9" and python_version < "4.0" 93 | requests-file==1.5.1 ; python_full_version >= "3.9.0" and python_version < "4" 94 | requests-toolbelt==1.0.0 ; python_full_version >= "3.9.0" and python_version < "4" 95 | requests==2.31.0 ; python_full_version >= "3.9.0" and python_version < "4" 96 | ruff==0.11.6 ; python_full_version >= "3.9.0" and python_version < "4" 97 | secretstorage==3.3.3 ; python_full_version >= "3.9.0" and python_version < "4.0" and sys_platform == "linux" 98 | setuptools==68.2.2 ; python_full_version >= "3.9.0" and python_version < "4" 99 | shellingham==1.5.4 ; python_full_version >= "3.9.0" and python_version < "4.0" 100 | six==1.16.0 ; python_version >= "3.9" and python_version < "4" 101 | sqlparse==0.4.4 ; python_full_version >= "3.9.0" and python_version < "4" 102 | text-unidecode==1.3 ; python_full_version >= "3.9.0" and python_version < "4" 103 | tldextract==3.6.0 ; python_full_version >= "3.9.0" and python_version < "4" 104 | tomli==2.2.1 ; python_version >= "3.9" and python_full_version <= "3.11.0a6" 105 | tomlkit==0.13.2 ; python_full_version >= "3.9.0" and python_version < "4.0" 106 | tox==4.25.0 ; python_full_version >= "3.9.0" and python_version < "4" 107 | trove-classifiers==2024.10.21.16 ; python_full_version >= "3.9.0" and python_version < "4.0" 108 | types-python-dateutil==2.8.19.14 ; python_full_version >= "3.9.0" and python_version < "4" 109 | types-pyyaml==6.0.12.20240311 ; python_full_version >= "3.9.0" and python_version < "4" 110 | typing-extensions==4.13.2 ; python_version >= "3.9" and python_version < "4" 111 | tzdata==2023.3 ; python_full_version >= "3.9.0" and python_version < "4" and sys_platform == "win32" 112 | urllib3==2.0.6 ; python_full_version >= "3.9.0" and python_version < "4" 113 | virtualenv==20.30.0 ; python_full_version >= "3.9.0" and python_version < "4" 114 | watchdog==3.0.0 ; python_version >= "3.9" and python_version < "4" 115 | xattr==1.1.0 ; python_full_version >= "3.9.0" and python_version < "4.0" and sys_platform == "darwin" 116 | zipp==3.17.0 ; python_version >= "3.9" and python_version < "3.12" 117 | -------------------------------------------------------------------------------- /docs/settings.md: -------------------------------------------------------------------------------- 1 | # Settings 2 | 3 | Below are all settings for `django-pgtrigger`. 4 | 5 | ## PGTRIGGER_INSTALL_ON_MIGRATE 6 | 7 | If `True`, `python manage.py pgtrigger install` will run automatically after `python manage.py migrate`. The trigger install command will use the same database as the migrate command. This setting is unnecessary if `PGTRIGGER_MIGRATIONS` is `True`. 8 | 9 | **Default** `False` 10 | 11 | !!! warning 12 | 13 | There are known issues with this approach, such as having trigger installation issues when reversing migrations. This is a secondary way to install triggers if migrations or model meta options aren't desired. 14 | 15 | ## PGTRIGGER_MIGRATIONS 16 | 17 | If `False`, triggers will not be added to migrations when running `python manage.py makemigrations`. Triggers will need to be installed manually or with `settings.PGTRIGGER_INSTALL_ON_MIGRATE`. 
18 | 19 | **Default** `True` 20 | 21 | ## PGTRIGGER_MODEL_META 22 | 23 | If `False`, triggers cannot be specified in the `triggers` attribute of model `Meta` options. Migrations will also be disabled. Triggers will need to be registered to models with [pgtrigger.register][] and installed manually or with `settings.PGTRIGGER_INSTALL_ON_MIGRATE`. 24 | 25 | **Default** `True` 26 | 27 | !!! warning 28 | 29 | Turning this off will result in an error if a third-party application declares triggers in model `Meta`. 30 | 31 | ## PGTRIGGER_PRUNE_ON_INSTALL 32 | 33 | If `True`, running `python manage.py pgtrigger install` or `python manage.py pgtrigger uninstall` with no arguments will run `python manage.py pgtrigger prune` to prune orphaned triggers. 34 | 35 | **Default** `True` 36 | 37 | ## PGTRIGGER_SCHEMA 38 | 39 | The schema under which global database objects are stored, such as the Postgres function used for ignoring triggers. 40 | 41 | **Default** `public` 42 | 43 | ## PGTRIGGER_SCHEMA_EDITOR 44 | 45 | If `False`, the schema editor for migrations will not be patched. Fields that are used in trigger conditions will result in migration failures if their types are changed unless the triggers are manually dropped ahead of time in the migration. 46 | 47 | **Default** `True` 48 | -------------------------------------------------------------------------------- /docs/statement.md: -------------------------------------------------------------------------------- 1 | # Statement-Level Triggers 2 | 3 | Statement-level triggers provide the ability to run triggers once per statement, offering a significant performance advantage over row-based triggers. There are some notable differences: 4 | 5 | 1. Statement-level triggers cannot be conditionally executed, making it more cumbersome to express triggers based on changes. 6 | 2. Statement-level triggers can only fire after an operation. They cannot alter rows in memory or cause Postgres to ignore certain operations. 7 | 3. There is no guaranteed ordering of the old and new rows in [transition tables](https://dba.stackexchange.com/questions/177463/what-is-a-transition-table-in-postgres). In order to detect differences between old and new, we must join based on primary key. If the primary key is updated, we miss out on these changes. 8 | 9 | With these differences in mind, `django-pgtrigger` provides the [pgtrigger.Composer][] trigger to facilitate writing performant conditional statement-level triggers like their row-level counterparts. [pgtrigger.Composer][] also helps one express a trigger as either row- or statement-level functions, facilitating more advanced trigger definitions in both `django-pgtrigger` and third-party libraries. 10 | 11 | Here we go over the fundamentals of how [pgtrigger.Composer][] works and how it is used by some triggers provided by `django-pgtrigger`. 12 | 13 | ## Automatic references declaration 14 | 15 | [pgtrigger.Composer][] limits the boilerplate of statement-level triggers by automatically creating a `references` declaration when `level=pgtrigger.Statement` is used. The `references` declaration makes `old_values` and `new_values` tables available based on the combinations of the operations provided. 16 | 17 | For example: 18 | 19 | - `pgtrigger.Update` makes `references=pgtrigger.References(old='old_values', new='new_values')` 20 | - `pgtrigger.Delete` makes `references=pgtrigger.References(old='old_values')` 21 | - `pgtrigger.Insert` makes `references=pgtrigger.References(new='new_values')` 22 | 23 | `pgtrigger.UpdateOf` and `pgtrigger.Truncate` always generate null references.
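As a quick illustration, here's a minimal sketch of a statement-level trigger that relies on the automatically-declared `new_values` table for inserts (the `int_field` column and the error message are hypothetical):

```python
pgtrigger.Composer(
    name="block_negative_inserts",
    level=pgtrigger.Statement,
    when=pgtrigger.After,
    operation=pgtrigger.Insert,
    func="""
        -- "new_values" is declared automatically for statement-level inserts
        IF EXISTS (SELECT 1 FROM new_values WHERE int_field < 0) THEN
            RAISE EXCEPTION 'Negative int_field values are not allowed';
        END IF;
        RETURN NULL;
    """,
)
```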
24 | 25 | 26 | !!! note 27 | 28 | Combinations of operations are not supported by Postgres when using transition tables, so operations like `pgtrigger.Update | pgtrigger.Delete` will result in no transition tables being declared. 29 | 30 | ## Template variables when using conditions 31 | 32 | One can use `condition` with [pgtrigger.Composer][] statement-level triggers, which provides the following template variables in [pgtrigger.Func][]: 33 | 34 | - **cond_old_values**: A fragment that has the `old_values` alias filtered by the condition. 35 | - **cond_new_values**: A fragment that has the `new_values` alias filtered by the condition. 36 | - **cond_joined_values**: A fragment that always has both `old_values` and `new_values` aliases joined and filtered by the condition. 37 | 38 | Use the minimum alias needed in your trigger to ensure the best performance of the generated SQL. If your trigger, for example, only needs to access conditionally-filtered old rows, use `cond_old_values` to generate the simplest SQL. 39 | 40 | Here's an example of a conditional update trigger to walk through how this works: 41 | 42 | ```python 43 | pgtrigger.Composer( 44 | name="composer_protect", 45 | when=pgtrigger.After, 46 | operation=pgtrigger.Update, 47 | declare=[("val", "RECORD")], 48 | func=pgtrigger.Func( 49 | """ 50 | FOR val IN SELECT new_values.* FROM {cond_new_values} 51 | LOOP 52 | RAISE EXCEPTION 'uh oh'; 53 | END LOOP; 54 | RETURN NULL; 55 | """ 56 | ), 57 | condition=pgtrigger.Q(new__int_field__gt=0, old__int_field__lt=100), 58 | ) 59 | ``` 60 | 61 | In the above, the expanded PL/pgSQL looks like this: 62 | 63 | ```sql 64 | FOR val IN 65 | SELECT new_values.* FROM old_values 66 | JOIN new_values ON old_values.id = new_values.id 67 | WHERE new_values.int_field > 0 AND old_values.int_field < 100 68 | LOOP 69 | RAISE EXCEPTION 'uh oh'; 70 | END LOOP; 71 | RETURN NULL; 72 | ``` 73 | 74 | Since the condition spans old and new, `{cond_new_values}` automatically joins these reference tables. If we simplify our condition to not require old values, `{cond_new_values}` becomes simpler too: 75 | 76 | ```python 77 | pgtrigger.Composer( 78 | name="composer_protect", 79 | when=pgtrigger.After, 80 | operation=pgtrigger.Update, 81 | declare=[("val", "RECORD")], 82 | func=pgtrigger.Func( 83 | """ 84 | FOR val IN SELECT new_values.* FROM {cond_new_values} 85 | LOOP 86 | RAISE EXCEPTION 'uh oh'; 87 | END LOOP; 88 | RETURN NULL; 89 | """ 90 | ), 91 | condition=pgtrigger.Q(new__int_field__gt=0), 92 | ) 93 | ``` 94 | 95 | In the above, the expanded PL/pgSQL looks like this: 96 | 97 | ```sql 98 | FOR val IN 99 | SELECT new_values.* FROM new_values 100 | WHERE new_values.int_field > 0 101 | LOOP 102 | RAISE EXCEPTION 'uh oh'; 103 | END LOOP; 104 | RETURN NULL; 105 | ``` 106 | 107 | Remember the following key points when using these variables: 108 | 109 | 1. `django-pgtrigger` renders three template variables for different use cases. Depending on the condition, fragments may be simpler if they don't span old and new rows. 110 | 2. When a condition spans old and new rows or a trigger needs access to both old and new, the transition tables are automatically joined on primary key. 111 | 112 | !!! danger 113 | 114 | If your primary keys are updated, the join may filter them out and they won't be returned in the SQL fragments. Always keep this in mind when writing conditional statement-level triggers.
Although it is rare that primary keys are updated, consider making a protection trigger for this case or avoiding conditional statement-level triggers altogether. 115 | 116 | ## Statement-level `Protect` and `ReadOnly` triggers 117 | 118 | [pgtrigger.Protect][] and [pgtrigger.ReadOnly][] use [pgtrigger.Composer][], providing statement-level versions of these triggers: 119 | 120 | ```python 121 | class MyModel(models.Model): 122 | class Meta: 123 | triggers = [ 124 | pgtrigger.Protect( 125 | name="protect_updates", 126 | level=pgtrigger.Statement, 127 | operation=pgtrigger.Update 128 | ) 129 | ] 130 | ``` 131 | 132 | ## Performance 133 | 134 | You may be wondering why you'd use the statement-level versions of triggers or [pgtrigger.Composer][] at all. It all comes down to performance. 135 | 136 | If your application is doing large bulk updates or inserts of tables, even simple row-level protection triggers are called for every row and can show up in performance measurements. Statement-level versions can be substantially faster. 137 | 138 | If you aren't using conditional triggers, or your conditions don't involve primary key changes, statement-level triggers can be a much better fit for use cases where performance is key. 139 | 140 | Always profile results yourself. [EXPLAIN ANALYZE](https://www.postgresql.org/docs/current/sql-explain.html) will show trigger overhead. 141 | -------------------------------------------------------------------------------- /docs/static/dark_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AmbitionEng/django-pgtrigger/2ba6f78e7da344e562ecfde0d4ec21eba646a2bf/docs/static/dark_logo.png -------------------------------------------------------------------------------- /docs/static/light_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AmbitionEng/django-pgtrigger/2ba6f78e7da344e562ecfde0d4ec21eba646a2bf/docs/static/light_logo.png -------------------------------------------------------------------------------- /docs/troubleshooting.md: -------------------------------------------------------------------------------- 1 | # Troubleshooting 2 | 3 | ## Disabling migration integration 4 | 5 | See [this section in the docs](advanced_installation.md#turning_off_migrations) for how to disable the integration with the migration system. 6 | 7 | ## Trigger installation fails when migrating 8 | 9 | If your triggers access multiple tables across apps, you may encounter installation issues if you haven't declared those apps as `dependencies` in the migration file. [See the Django docs](https://docs.djangoproject.com/en/4.1/topics/migrations/#dependencies) for more information. 10 | 11 | If you have `settings.PGTRIGGER_INSTALL_ON_MIGRATE` set to `True`, this can also cause trigger installation issues when migrations are reversed. Although database tables are properly reversed, triggers may be in an inconsistent state. You can use `python manage.py pgtrigger ls` to see the status of all triggers. 12 | 13 | ## Triggers are still outdated after migrating 14 | 15 | If `python manage.py pgtrigger ls` shows outdated triggers and `makemigrations` isn't showing changes, you are likely affected by a legacy issue that has been addressed as of version 4.5. The issue is normally harmless and can be corrected by upgrading or running `python manage.py pgtrigger install` to ensure triggers are up to date.
16 | 17 | ## Patches are causing the application to fail 18 | 19 | `django-pgtrigger` patches the minimum amount of Django functionality necessary to integrate with the migration system and install triggers. If this causes errors in your application, try turning off the relevant settings: 20 | 21 | * Set `settings.PGTRIGGER_SCHEMA_EDITOR` to `False` to prevent it from overriding the schema editor. Turning this off is mostly harmless, but you will have errors installing triggers if column types of trigger conditions are altered. 22 | 23 | * Set `settings.PGTRIGGER_MIGRATIONS` to `False` to completely turn off integration with the migration system. You will need to manually install triggers or set `settings.PGTRIGGER_INSTALL_ON_MIGRATE` to `True` to always install triggers after migrations. Note that this approach has limitations and bugs, such as issues when reversing migrations. 24 | 25 | * Set `settings.PGTRIGGER_MODEL_META` to `False` to disable specifying triggers in model `Meta`. You must explicitly register every trigger with [pgtrigger.register][], and triggers on third-party models may not be discovered. Integration with the migration system will also be turned off as a result. 26 | 27 | ## All triggers were updated at once 28 | 29 | A few updates, such as version 4.5, change the underlying SQL of triggers. This in turn causes all of the triggers to be updated when running `python manage.py makemigrations`. 30 | 31 | Version 4.5 made significant changes to the migration system integration to avoid this needing to happen in the future. 32 | 33 | ## Trigger migrations stall 34 | 35 | When a trigger is dropped or created, it alters the table, thus taking out the most exclusive lock possible and blocking reads of the table. 36 | 37 | Migrations run in a transaction by default, meaning locks will be held until the end of the entire migration. If later operations in the migration block on acquiring locks, the previous locks will remain held until the end. This can cause extended downtime for an application. 38 | 39 | If your migration isn't doing any other table alterations such as adding columns, you can alleviate lock contention as follows: 40 | 41 | 1. Remove any `RemoveTrigger` operations if the trigger is only being updated in the migration. The `AddTrigger` operations are idempotent, so dropping them before adding them is not necessary. 42 | 2. Once all of the `RemoveTrigger` operations are gone, you can set `atomic = False` on the migration ([see the Django docs](https://docs.djangoproject.com/en/4.1/topics/migrations/#transactions)) to avoid unnecessary lock consumption. 43 | 44 | !!! danger 45 | 46 | Be sure you understand exactly what is happening when adding `atomic=False` to a migration. If there are other migration operations in the file, such as adding fields, it could create errors that are difficult to fix if the migration fails midway. If you don't remove the `RemoveTrigger` operations, you also might create a scenario where your triggers aren't installed for a period of time. -------------------------------------------------------------------------------- /docs/upgrading.md: -------------------------------------------------------------------------------- 1 | # Upgrading 2 | 3 | ## Version 3 4 | 5 | Version 3 integrates with the migration system and also drops the need for configuring `django-pgconnection` for using [pgtrigger.ignore][]. It also fully supports the `Meta.triggers` syntax for registering triggers.
6 | 7 | The majority of users can simply run `python manage.py makemigrations` after upgrading, provided there are no triggers registered to third-party models or many-to-many default "through" models. Read below for more details on the upgrades, and follow the special instructions if either of these cases applies to you. 8 | 9 | ### Integration with Django migrations 10 | 11 | All triggers now appear in migrations when running `python manage.py makemigrations`. Triggers from version 2 will appear as new `AddTrigger` operations. They will succeed when running `migrate` even if previously installed. Remember, however, that triggers will be deleted if the migrations are reversed. 12 | 13 | Almost all users can simply run `python manage.py makemigrations` after upgrading. If, however, you have triggers on third-party models or many-to-many default "through" models, use these instructions to migrate them: 14 | 15 | 1. If you already ran `python manage.py makemigrations`, delete any new migrations made for these third-party apps. 16 | 2. Declare proxy models for the third-party or many-to-many "through" models, register triggers in `Meta.triggers`, and call `python manage.py makemigrations`. See code examples in the [Advanced Installation](advanced_installation.md) section. 17 | 3. Declaring proxy models will rename old triggers, leaving them in an orphaned state since they weren't previously managed by migrations. Ensure these old triggers are removed by doing any of the following: 18 | 19 | - Make a `migrations.RunPython` operation at the end of your migration or in a new data migration that does `call_command("pgtrigger", "prune")`. Note that `call_command` is imported from `django.core.management`. 20 | - OR run `python manage.py pgtrigger prune` after your deployment is complete 21 | - OR set `settings.PGTRIGGER_INSTALL_ON_MIGRATE` to `True` for a short period of time in your settings. This will automatically prune those old triggers after deployment, and you can turn this setting back to `False` later. 22 | 23 | If you'd like to keep the legacy installation behavior and turn off migrations entirely, set `settings.PGTRIGGER_MIGRATIONS` to `False` to turn off trigger migrations and set `settings.PGTRIGGER_INSTALL_ON_MIGRATE` to `True` so that triggers are always installed at the end of `python manage.py migrate`. 24 | 25 | ### Dropping of `django-pgconnection` dependency 26 | 27 | [pgtrigger.ignore][] previously required that `django-pgconnection` was used to configure the `settings.DATABASES` setting. `django-pgconnection` is no longer needed, and `settings.DATABASES` no longer needs to be wrapped in order 28 | for [pgtrigger.ignore][] to function properly. 29 | 30 | ### New `Meta.triggers` syntax 31 | 32 | Version 2.5 introduced the ability to register triggers in your model's `Meta.triggers` list. Users can still use [pgtrigger.register][] to register triggers programmatically, but it has been deprecated. 33 | 34 | ## Version 4 35 | 36 | Version 4 changes the behavior of multi-database and multi-schema usage. If you don't use multiple databases or multiple schemas, the only breaking API change that might affect you is `pgtrigger.get` being renamed to [pgtrigger.registered][]. 37 | 38 | For multi-database setups, triggers are now installed on one database at a time using the `--database` argument of management commands. Triggers are only ignored on databases based on the `allow_migrate` method of any installed routers. This mimics Django's behavior of installing tables.
39 | 40 | If you use `settings.PGTRIGGER_INSTALL_ON_MIGRATE`, triggers will only be installed for the database that was passed to `python manage.py migrate`. 41 | 42 | Version 4 adds support for multi-schema setups. See the [Advanced Database Setups](advanced_db.md) section for more information. 43 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: django-pgtrigger 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python==3.13.0 6 | - poetry==1.8.4 7 | - pip==24.2 8 | - postgresql==17.0 9 | variables: 10 | DATABASE_URL: "postgres://postgres@localhost:5432/pgtrigger_local" 11 | EXEC_WRAPPER: "" 12 | -------------------------------------------------------------------------------- /footing.yaml: -------------------------------------------------------------------------------- 1 | _extensions: 2 | - jinja2_time.TimeExtension 3 | _template: git@github.com:Opus10/public-django-app-template.git 4 | _version: 7329a35244e30019e6214000335b6450de0c9110 5 | check_types_in_ci: 'False' 6 | is_django: 'True' 7 | module_name: pgtrigger 8 | repo_name: django-pgtrigger 9 | short_description: Postgres trigger support integrated with Django models. 10 | -------------------------------------------------------------------------------- /manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | 5 | if __name__ == "__main__": 6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") 7 | from django.core.management import execute_from_command_line 8 | 9 | execute_from_command_line(sys.argv) 10 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: django-pgtrigger 2 | docs_dir: docs 3 | 4 | repo_name: AmbitionEng/django-pgtrigger 5 | repo_url: https://github.com/AmbitionEng/django-pgtrigger 6 | 7 | plugins: 8 | - search 9 | - mkdocstrings: 10 | handlers: 11 | python: 12 | import: 13 | - https://docs.python.org/3/objects.inv 14 | - https://installer.readthedocs.io/en/stable/objects.inv 15 | - https://mkdocstrings.github.io/autorefs/objects.inv 16 | options: 17 | docstring_options: 18 | ignore_init_summary: true 19 | line_length: 80 20 | heading_level: 3 21 | merge_init_into_class: true 22 | separate_signature: true 23 | show_root_heading: true 24 | show_root_full_path: true 25 | show_root_members_full_path: true 26 | show_signature_annotations: true 27 | show_symbol_type_heading: true 28 | show_symbol_type_toc: true 29 | signature_crossrefs: true 30 | 31 | markdown_extensions: 32 | # For admonitions 33 | - admonition 34 | - pymdownx.details 35 | - pymdownx.superfences 36 | - pymdownx.highlight: 37 | anchor_linenums: true 38 | line_spans: __span 39 | pygments_lang_class: true 40 | - pymdownx.inlinehilite 41 | - pymdownx.snippets 42 | - pymdownx.superfences 43 | - tables 44 | - pymdownx.superfences: 45 | custom_fences: 46 | - name: mermaid 47 | class: mermaid 48 | format: !!python/name:pymdownx.superfences.fence_code_format 49 | - toc: 50 | permalink: true 51 | 52 | theme: 53 | custom_dir: docs/overrides 54 | name: material 55 | logo: static/dark_logo.png 56 | favicon: static/light_logo.png 57 | features: 58 | - content.code.copy 59 | - navigation.footer 60 | - navigation.path 61 | - navigation.sections 62 | - navigation.tracking 63 | - 
search.suggest 64 | - search.highlight 65 | - toc.follow 66 | palette: 67 | - media: "(prefers-color-scheme: light)" 68 | scheme: default 69 | primary: custom 70 | toggle: 71 | icon: material/brightness-7 72 | name: Switch to dark mode 73 | - media: "(prefers-color-scheme: dark)" 74 | scheme: slate 75 | primary: custom 76 | toggle: 77 | icon: material/brightness-4 78 | name: Switch to light mode 79 | 80 | extra_css: 81 | - css/mkdocstrings.css 82 | - css/mkdocs-material.css 83 | 84 | nav: 85 | - Overview: index.md 86 | - Installation: installation.md 87 | - Getting Started: 88 | - Basics: basics.md 89 | - Trigger Cookbook: cookbook.md 90 | - Conditional Triggers: conditional.md 91 | - Ignoring Execution: ignoring_triggers.md 92 | - Advanced Functionality: 93 | - Statement-Level Triggers: statement.md 94 | - Deferrable Triggers: deferrable.md 95 | - Installation Edge Cases: advanced_installation.md 96 | - Non-Standard Database Setups: advanced_db.md 97 | - Help: 98 | - Frequently Asked Questions: faq.md 99 | - Troubleshooting: troubleshooting.md 100 | - Upgrading: upgrading.md 101 | - Further Reading: further_reading.md 102 | - API: 103 | - Settings: settings.md 104 | - Commands: commands.md 105 | - Module: module.md 106 | - Release Notes: release_notes.md 107 | - Contributing Guide: contributing.md 108 | -------------------------------------------------------------------------------- /pgtrigger/__init__.py: -------------------------------------------------------------------------------- 1 | import django 2 | 3 | from pgtrigger.contrib import ( 4 | FSM, 5 | Composer, 6 | Protect, 7 | ReadOnly, 8 | SoftDelete, 9 | UpdateSearchVector, 10 | ) 11 | from pgtrigger.core import ( 12 | After, 13 | AllChange, 14 | AllDontChange, 15 | AnyChange, 16 | AnyDontChange, 17 | Before, 18 | Condition, 19 | Deferred, 20 | Delete, 21 | F, 22 | Func, 23 | Immediate, 24 | Insert, 25 | InsteadOf, 26 | IsDistinctFrom, 27 | IsNotDistinctFrom, 28 | Level, 29 | Operation, 30 | Operations, 31 | Q, 32 | Referencing, 33 | Row, 34 | Statement, 35 | Timing, 36 | Trigger, 37 | Truncate, 38 | Update, 39 | UpdateOf, 40 | When, 41 | ) 42 | from pgtrigger.installation import ( 43 | disable, 44 | enable, 45 | install, 46 | prunable, 47 | prune, 48 | uninstall, 49 | ) 50 | from pgtrigger.registry import ( 51 | register, 52 | registered, 53 | ) 54 | from pgtrigger.runtime import ( 55 | constraints, 56 | ignore, 57 | schema, 58 | ) 59 | from pgtrigger.version import __version__ 60 | 61 | if django.VERSION < (3, 2): # pragma: no cover 62 | default_app_config = "pgtrigger.apps.PGTriggerConfig" 63 | 64 | del django 65 | 66 | 67 | __all__ = [ 68 | "After", 69 | "AllChange", 70 | "AllDontChange", 71 | "AnyChange", 72 | "AnyDontChange", 73 | "Before", 74 | "Composer", 75 | "Condition", 76 | "constraints", 77 | "Deferred", 78 | "Delete", 79 | "disable", 80 | "enable", 81 | "F", 82 | "FSM", 83 | "Func", 84 | "ignore", 85 | "Immediate", 86 | "Insert", 87 | "install", 88 | "InsteadOf", 89 | "IsDistinctFrom", 90 | "IsNotDistinctFrom", 91 | "Level", 92 | "Operation", 93 | "Operations", 94 | "Protect", 95 | "prunable", 96 | "prune", 97 | "Q", 98 | "ReadOnly", 99 | "Referencing", 100 | "register", 101 | "registered", 102 | "Row", 103 | "schema", 104 | "SoftDelete", 105 | "Statement", 106 | "Timing", 107 | "Trigger", 108 | "Truncate", 109 | "uninstall", 110 | "Update", 111 | "UpdateOf", 112 | "UpdateSearchVector", 113 | "When", 114 | "__version__", 115 | ] 116 | -------------------------------------------------------------------------------- 
/pgtrigger/apps.py: -------------------------------------------------------------------------------- 1 | import django.apps 2 | import django.db.backends.postgresql.schema as postgresql_schema 3 | from django.conf import settings 4 | from django.core.management.commands import makemigrations, migrate 5 | from django.db.migrations import state 6 | from django.db.models import options 7 | from django.db.models.signals import post_migrate 8 | from django.db.utils import load_backend 9 | 10 | from pgtrigger import core, features, installation, migrations 11 | 12 | # Allow triggers to be specified in model Meta. Users can turn this 13 | # off via settings if it causes issues. If turned off, migrations 14 | # are also disabled 15 | if features.model_meta(): # pragma: no branch 16 | if "triggers" not in options.DEFAULT_NAMES: # pragma: no branch 17 | options.DEFAULT_NAMES = tuple(options.DEFAULT_NAMES) + ("triggers",) 18 | 19 | 20 | def patch_migrations(): 21 | """ 22 | Patch the autodetector and model state detection if migrations are turned on 23 | """ 24 | if features.migrations(): # pragma: no branch 25 | if "triggers" not in state.DEFAULT_NAMES: # pragma: no branch 26 | state.DEFAULT_NAMES = tuple(state.DEFAULT_NAMES) + ("triggers",) 27 | 28 | if not issubclass( # pragma: no branch 29 | makemigrations.MigrationAutodetector, migrations.MigrationAutodetectorMixin 30 | ): 31 | makemigrations.MigrationAutodetector = type( 32 | "MigrationAutodetector", 33 | (migrations.MigrationAutodetectorMixin, makemigrations.MigrationAutodetector), 34 | {}, 35 | ) 36 | 37 | if not issubclass( # pragma: no branch 38 | migrate.MigrationAutodetector, migrations.MigrationAutodetectorMixin 39 | ): 40 | migrate.MigrationAutodetector = type( 41 | "MigrationAutodetector", 42 | (migrations.MigrationAutodetectorMixin, migrate.MigrationAutodetector), 43 | {}, 44 | ) 45 | 46 | if django.VERSION >= (5, 2): 47 | makemigrations.Command.autodetector = makemigrations.MigrationAutodetector 48 | migrate.Command.autodetector = makemigrations.MigrationAutodetector 49 | 50 | 51 | def patch_schema_editor(): 52 | """ 53 | Patch the schema editor to allow for column types to be altered on 54 | trigger conditions 55 | """ 56 | if features.schema_editor(): # pragma: no branch 57 | for config in settings.DATABASES.values(): 58 | backend = load_backend(config["ENGINE"]) 59 | schema_editor_class = backend.DatabaseWrapper.SchemaEditorClass 60 | 61 | if ( 62 | schema_editor_class 63 | and issubclass( 64 | schema_editor_class, 65 | postgresql_schema.DatabaseSchemaEditor, 66 | ) 67 | and not issubclass(schema_editor_class, migrations.DatabaseSchemaEditorMixin) 68 | ): 69 | backend.DatabaseWrapper.SchemaEditorClass = type( 70 | "DatabaseSchemaEditor", 71 | (migrations.DatabaseSchemaEditorMixin, schema_editor_class), 72 | {}, 73 | ) 74 | 75 | 76 | def register_triggers_from_meta(): 77 | """ 78 | Populate the trigger registry from model `Meta.triggers` 79 | """ 80 | if features.model_meta(): # pragma: no branch 81 | for model in django.apps.apps.get_models(): 82 | triggers = getattr(model._meta, "triggers", []) 83 | for trigger in triggers: 84 | if not isinstance(trigger, core.Trigger): # pragma: no cover 85 | raise TypeError(f"Triggers in {model} Meta must be pgtrigger.Trigger classes") 86 | 87 | trigger.register(model) 88 | 89 | 90 | def install_on_migrate(using, **kwargs): 91 | if features.install_on_migrate(): 92 | installation.install(database=using) 93 | 94 | 95 | class PGTriggerConfig(django.apps.AppConfig): 96 | name = "pgtrigger" 97 | 98 
| def ready(self): 99 | """ 100 | Do all necessary patching, trigger setup, and signal handler configuration 101 | """ 102 | patch_migrations() 103 | patch_schema_editor() 104 | register_triggers_from_meta() 105 | 106 | # Configure triggers to automatically be installed after migrations 107 | post_migrate.connect(install_on_migrate, sender=self) 108 | -------------------------------------------------------------------------------- /pgtrigger/compiler.py: -------------------------------------------------------------------------------- 1 | import collections 2 | import hashlib 3 | from enum import Enum 4 | from typing import Any, Optional, Sequence, Union 5 | 6 | import pgtrigger 7 | from pgtrigger import utils 8 | 9 | 10 | class _Unset(Enum): 11 | token = 0 12 | 13 | 14 | _unset = _Unset.token 15 | 16 | 17 | class UpsertTriggerSql(collections.UserString): 18 | """SQL for inserting or updating a trigger 19 | 20 | This class is intended to be versionable since migrations 21 | reference it. Older migrations need to be able to point 22 | to earlier versions of the installation template used 23 | for triggers. 24 | """ 25 | 26 | def get_template(self) -> str: 27 | """ 28 | This is v1 of the installation template. Do NOT edit 29 | this template unless you are absolutely sure it is 30 | backwards compatible, otherwise it may affect migrations 31 | that reference it. 32 | 33 | If it does need to be changed, we will need to introduce 34 | a version variable to be backwards compatible. 35 | 36 | Note: Postgres 14 has CREATE OR REPLACE syntax that 37 | we might consider using. This SQL is executed in 38 | a transaction, so dropping and recreating shouldn't 39 | be a problem. 40 | """ 41 | return """ 42 | CREATE OR REPLACE FUNCTION {ignore_func_name}( 43 | trigger_name NAME 44 | ) 45 | RETURNS BOOLEAN AS $$ 46 | DECLARE 47 | _pgtrigger_ignore TEXT[]; 48 | _result BOOLEAN; 49 | BEGIN 50 | BEGIN 51 | SELECT INTO _pgtrigger_ignore 52 | CURRENT_SETTING('pgtrigger.ignore'); 53 | EXCEPTION WHEN OTHERS THEN 54 | END; 55 | IF _pgtrigger_ignore IS NOT NULL THEN 56 | SELECT trigger_name = ANY(_pgtrigger_ignore) 57 | INTO _result; 58 | RETURN _result; 59 | ELSE 60 | RETURN FALSE; 61 | END IF; 62 | END; 63 | $$ LANGUAGE plpgsql; 64 | 65 | CREATE OR REPLACE FUNCTION {pgid}() 66 | RETURNS TRIGGER AS $$ 67 | {declare} 68 | BEGIN 69 | IF ({ignore_func_name}(TG_NAME) IS TRUE) THEN 70 | IF (TG_OP = 'DELETE') THEN 71 | RETURN OLD; 72 | ELSE 73 | RETURN NEW; 74 | END IF; 75 | END IF; 76 | {func} 77 | END; 78 | $$ LANGUAGE plpgsql; 79 | 80 | DROP TRIGGER IF EXISTS {pgid} ON {table}; 81 | CREATE {constraint} TRIGGER {pgid} 82 | {when} {operation} ON {table} 83 | {timing} 84 | {referencing} 85 | FOR EACH {level} {condition} 86 | EXECUTE PROCEDURE {execute}; 87 | 88 | COMMENT ON TRIGGER {pgid} ON {table} IS '{hash}'; 89 | """ 90 | 91 | def get_defaults(self, pgid: str) -> dict[str, str]: 92 | """ 93 | These are the default values for the installation 94 | template. Do NOT edit these default values. Keys 95 | may be added, but existing keys should never be updated, 96 | otherwise existing migrations may no longer be correct. 97 | 98 | If it does need to be changed, we will need to introduce 99 | a version variable to be backwards compatible. 
100 | """ 101 | return { 102 | "ignore_func_name": '"public"._pgtrigger_should_ignore', 103 | "declare": "", 104 | "constraint": "", 105 | "timing": "", 106 | "referencing": "", 107 | "level": "ROW", 108 | "condition": "", 109 | "execute": f"{pgid}()", 110 | } 111 | 112 | def __init__( 113 | self, 114 | *, 115 | ignore_func_name: Union[str, _Unset] = _unset, 116 | pgid: str, 117 | declare: Union[str, _Unset] = _unset, 118 | func: str, 119 | table: str, 120 | constraint: Union[str, _Unset] = _unset, 121 | when: Union["pgtrigger.core.When", str, None], 122 | operation: Union["pgtrigger.core.Operation", str, None], 123 | timing: Union[str, _Unset] = _unset, 124 | referencing: Union["pgtrigger.core.Referencing", str, _Unset] = _unset, 125 | level: Union["pgtrigger.core.Level", str, _Unset] = _unset, 126 | condition: Union[str, _Unset] = _unset, 127 | execute: Union[str, _Unset] = _unset, 128 | hash: Optional[str] = None, 129 | ): 130 | """Initialize the SQL and store it in the `.data` attribute.""" 131 | self.kwargs = { 132 | key: str(val) 133 | for key, val in locals().items() 134 | if key not in ("self", "hash") and val is not _unset 135 | } 136 | self.defaults = self.get_defaults(pgid) 137 | sql_args = {**self.defaults, **self.kwargs, **{"table": utils.quote(table)}} 138 | 139 | self.hash = ( 140 | hash 141 | or hashlib.sha1( 142 | self.get_template().format(**{**sql_args, **{"hash": ""}}).encode() 143 | ).hexdigest() 144 | ) 145 | self.data = self.get_template().format(**{**sql_args, **{"hash": self.hash}}) 146 | self.pgid = pgid 147 | self.table = table 148 | 149 | def deconstruct(self) -> tuple[str, Sequence[Any], dict[str, Any]]: 150 | """ 151 | Serialize the construction of this class so that it can be used in migrations. 152 | """ 153 | kwargs = { 154 | key: val for key, val in self.kwargs.items() if self.defaults.get(key, _unset) != val 155 | } 156 | 157 | path = f"{self.__class__.__module__}.{self.__class__.__name__}" 158 | return path, [], {**kwargs, **{"hash": self.hash}} 159 | 160 | 161 | class _TriggerDdlSql(collections.UserString): 162 | def get_template(self) -> str: 163 | raise NotImplementedError 164 | 165 | def __init__(self, *, pgid: str, table: str) -> None: 166 | """Initialize the SQL and store it in the `.data` attribute.""" 167 | sql_args = {**locals(), **{"table": utils.quote(table)}} 168 | 169 | self.data = self.get_template().format(**sql_args) 170 | 171 | 172 | class DropTriggerSql(_TriggerDdlSql): 173 | """SQL for dropping a trigger 174 | 175 | Triggers are dropped in migrations, so any edits to 176 | the drop trigger template should be backwards compatible 177 | or versioned. I.e. older migrations need to always point to 178 | the SQL here 179 | """ 180 | 181 | def get_template(self) -> str: 182 | return "DROP TRIGGER IF EXISTS {pgid} ON {table};" 183 | 184 | 185 | class EnableTriggerSql(_TriggerDdlSql): 186 | """SQL for enabling a trigger 187 | 188 | We don't currently perform enabling/disabling in migrations, 189 | so this SQL can be changed without consequences to past 190 | migrations. 191 | """ 192 | 193 | def get_template(self) -> str: 194 | return "ALTER TABLE {table} ENABLE TRIGGER {pgid};" 195 | 196 | 197 | class DisableTriggerSql(_TriggerDdlSql): 198 | """SQL for disabling a trigger 199 | 200 | We don't currently perform enabling/disabling in migrations, 201 | so this SQL can be changed without consequences to past 202 | migrations. 
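To make the mechanics of `UpsertTriggerSql` concrete, here is a minimal sketch of constructing one by hand in a Django shell where pgtrigger is importable. The trigger id, table, and function body are hypothetical; only the keyword names come from the constructor above.

from pgtrigger import compiler

sql = compiler.UpsertTriggerSql(
    pgid="pgtrigger_protect_delete_abc12",  # hypothetical Postgres identifier
    func="RAISE EXCEPTION 'no deletes';",
    table="myapp_mymodel",
    when="BEFORE",
    operation="DELETE",
)

print(sql.hash)  # sha1 of the rendered template, stored in the trigger's COMMENT
print(str(sql))  # the full CREATE FUNCTION / CREATE TRIGGER statements

path, args, kwargs = sql.deconstruct()
# Only non-default keyword arguments plus the hash are serialized, keeping
# migration files small and stable across library upgrades.
assert kwargs["hash"] == sql.hash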
203 | """ 204 | 205 | def get_template(self) -> str: 206 | return "ALTER TABLE {table} DISABLE TRIGGER {pgid};" 207 | 208 | 209 | class Trigger: 210 | """ 211 | A compiled trigger that's added to internal model state of migrations. It consists 212 | of a name and the trigger SQL for migrations. 213 | """ 214 | 215 | def __init__(self, *, name: Optional[str], sql: UpsertTriggerSql) -> None: 216 | self.name = name 217 | self.sql = sql 218 | assert isinstance(sql, UpsertTriggerSql) 219 | 220 | def __eq__(self, other): 221 | return ( 222 | self.__class__ == other.__class__ and self.name == other.name and self.sql == other.sql 223 | ) 224 | 225 | @property 226 | def install_sql(self) -> str: 227 | return str(self.sql) 228 | 229 | @property 230 | def uninstall_sql(self) -> str: 231 | return str(DropTriggerSql(pgid=self.sql.pgid, table=self.sql.table)) 232 | 233 | @property 234 | def enable_sql(self) -> str: 235 | return str(EnableTriggerSql(pgid=self.sql.pgid, table=self.sql.table)) 236 | 237 | @property 238 | def disable_sql(self) -> str: 239 | return str(DisableTriggerSql(pgid=self.sql.pgid, table=self.sql.table)) 240 | 241 | @property 242 | def hash(self) -> str: 243 | return self.sql.hash 244 | 245 | def deconstruct(self) -> tuple[str, Sequence[Any], dict[str, Any]]: 246 | """ 247 | Serialize the construction of this class so that it can be used in migrations. 248 | """ 249 | path = f"{self.__class__.__module__}.{self.__class__.__name__}" 250 | return path, [], {"name": self.name, "sql": self.sql} 251 | -------------------------------------------------------------------------------- /pgtrigger/features.py: -------------------------------------------------------------------------------- 1 | from django.conf import settings 2 | 3 | 4 | def model_meta(): 5 | """ 6 | True if model meta support is enabled 7 | """ 8 | return getattr(settings, "PGTRIGGER_MODEL_META", True) 9 | 10 | 11 | def schema_editor(): 12 | """ 13 | True if we are using the patched Postgres schema editor. 
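This getter and the ones that follow below read a handful of optional Django settings. A settings.py sketch spelling out the documented defaults looks like this; every name is taken from the getattr calls in this module, and overriding any of them is optional.

# settings.py -- pgtrigger feature toggles shown with their default values
PGTRIGGER_MODEL_META = True           # allow triggers to be declared in model Meta
PGTRIGGER_SCHEMA_EDITOR = True        # patch the Postgres schema editor
PGTRIGGER_MIGRATIONS = True           # only honored when PGTRIGGER_MODEL_META is True
PGTRIGGER_INSTALL_ON_MIGRATE = False  # install triggers in a post_migrate handler
PGTRIGGER_SCHEMA = "public"           # schema where special objects are installed
PGTRIGGER_PRUNE_ON_INSTALL = True     # prune orphaned triggers on a full install/uninstall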
14 | 15 | Note that setting this to False means that we cannot easily 16 | alter columns of models that are associated with trigger 17 | conditions 18 | """ 19 | return getattr(settings, "PGTRIGGER_SCHEMA_EDITOR", True) 20 | 21 | 22 | def migrations(): 23 | """ 24 | True if migrations are enabled 25 | """ 26 | return model_meta() and getattr(settings, "PGTRIGGER_MIGRATIONS", True) 27 | 28 | 29 | def install_on_migrate(): 30 | """ 31 | True if triggers should be installed after migrations 32 | """ 33 | return getattr(settings, "PGTRIGGER_INSTALL_ON_MIGRATE", False) 34 | 35 | 36 | def schema(): 37 | """ 38 | The default schema where special objects are installed 39 | """ 40 | return getattr(settings, "PGTRIGGER_SCHEMA", "public") 41 | 42 | 43 | def prune_on_install(): 44 | """ 45 | True if triggers should be pruned on a full install or uninstall 46 | """ 47 | return getattr(settings, "PGTRIGGER_PRUNE_ON_INSTALL", True) 48 | -------------------------------------------------------------------------------- /pgtrigger/installation.py: -------------------------------------------------------------------------------- 1 | """ 2 | The primary functional API for pgtrigger 3 | """ 4 | 5 | import logging 6 | from typing import List, Tuple, Union 7 | 8 | from django.db import DEFAULT_DB_ALIAS, connections 9 | 10 | from pgtrigger import features, registry, utils 11 | 12 | # The core pgtrigger logger 13 | LOGGER = logging.getLogger("pgtrigger") 14 | 15 | 16 | def install(*uris: str, database: Union[str, None] = None) -> None: 17 | """ 18 | Install triggers. 19 | 20 | Args: 21 | *uris: URIs of triggers to install. If none are provided, 22 | all triggers are installed and orphaned triggers are pruned. 23 | database: The database. Defaults to the "default" database. 24 | """ 25 | for model, trigger in registry.registered(*uris): 26 | LOGGER.info( 27 | "pgtrigger: Installing %s trigger for %s table on %s database.", 28 | trigger, 29 | model._meta.db_table, 30 | database or DEFAULT_DB_ALIAS, 31 | ) 32 | trigger.install(model, database=database) 33 | 34 | if not uris and features.prune_on_install(): # pragma: no branch 35 | prune(database=database) 36 | 37 | 38 | def prunable(database: Union[str, None] = None) -> List[Tuple[str, str, bool, str]]: 39 | """Return triggers that are candidates for pruning 40 | 41 | Args: 42 | database: The database. Defaults to the "default" database. 43 | 44 | Returns: 45 | A list of tuples consisting of the table, trigger ID, enablement, and database 46 | """ 47 | if not utils.is_postgres(database): 48 | return [] 49 | 50 | registered = { 51 | (utils.quote(model._meta.db_table), trigger.get_pgid(model)) 52 | for model, trigger in registry.registered() 53 | } 54 | 55 | with utils.connection(database).cursor() as cursor: 56 | parent_trigger_clause = "tgparentid = 0 AND" if utils.pg_maj_version(cursor) >= 13 else "" 57 | 58 | # Only select triggers that are in the current search path. 
We accomplish 59 | # this by parsing the tgrelid and only selecting triggers that don't have 60 | # a schema name in their path 61 | cursor.execute( 62 | f""" 63 | SELECT tgrelid::regclass, tgname, tgenabled 64 | FROM pg_trigger 65 | WHERE tgname LIKE 'pgtrigger_%%' AND 66 | {parent_trigger_clause} 67 | array_length(parse_ident(tgrelid::regclass::varchar), 1) = 1 68 | """ 69 | ) 70 | triggers = set(cursor.fetchall()) 71 | 72 | return [ 73 | (trigger[0], trigger[1], trigger[2] == "O", database or DEFAULT_DB_ALIAS) 74 | for trigger in triggers 75 | if (utils.quote(trigger[0]), trigger[1]) not in registered 76 | ] 77 | 78 | 79 | def prune(database: Union[str, None] = None) -> None: 80 | """ 81 | Remove any pgtrigger triggers in the database that are not used by models. 82 | I.e. if a model or trigger definition is deleted from a model, ensure 83 | it is removed from the database 84 | 85 | Args: 86 | database: The database. Defaults to the "default" database. 87 | """ 88 | for trigger in prunable(database=database): 89 | LOGGER.info( 90 | "pgtrigger: Pruning trigger %s for table %s on %s database.", 91 | trigger[1], 92 | trigger[0], 93 | trigger[3], 94 | ) 95 | 96 | connection = connections[trigger[3]] 97 | uninstall_sql = utils.render_uninstall(trigger[0], trigger[1]) 98 | with connection.cursor() as cursor: 99 | cursor.execute(uninstall_sql) 100 | 101 | 102 | def enable(*uris: str, database: Union[str, None] = None) -> None: 103 | """ 104 | Enables registered triggers. 105 | 106 | Args: 107 | *uris: URIs of triggers to enable. If none are provided, 108 | all triggers are enabled. 109 | database: The database. Defaults to the "default" database. 110 | """ 111 | for model, trigger in registry.registered(*uris): 112 | LOGGER.info( 113 | "pgtrigger: Enabling %s trigger for %s table on %s database.", 114 | trigger, 115 | model._meta.db_table, 116 | database or DEFAULT_DB_ALIAS, 117 | ) 118 | trigger.enable(model, database=database) 119 | 120 | 121 | def uninstall(*uris: str, database: Union[str, None] = None) -> None: 122 | """ 123 | Uninstalls triggers. 124 | 125 | Args: 126 | *uris: URIs of triggers to uninstall. If none are provided, 127 | all triggers are uninstalled and orphaned triggers are pruned. 128 | database: The database. Defaults to the "default" database. 129 | """ 130 | for model, trigger in registry.registered(*uris): 131 | LOGGER.info( 132 | "pgtrigger: Uninstalling %s trigger for %s table on %s database.", 133 | trigger, 134 | model._meta.db_table, 135 | database or DEFAULT_DB_ALIAS, 136 | ) 137 | trigger.uninstall(model, database=database) 138 | 139 | if not uris and features.prune_on_install(): 140 | prune(database=database) 141 | 142 | 143 | def disable(*uris: str, database: Union[str, None] = None) -> None: 144 | """ 145 | Disables triggers. 146 | 147 | Args: 148 | *uris: URIs of triggers to disable. If none are provided, 149 | all triggers are disabled. 150 | database: The database. Defaults to the "default" database. 
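Taken together, these functions form the imperative API that the `pgtrigger` management command wraps. A usage sketch inside a configured Django project follows; the trigger URI is borrowed from the test models later in this repository and the database alias is hypothetical.

from pgtrigger import installation

installation.install()  # install every registered trigger and prune orphans

# Target a single trigger by URI, optionally on a non-default database
installation.uninstall("tests.SoftDelete:soft_delete")
installation.install("tests.SoftDelete:soft_delete", database="other")

# Inspect prune candidates without touching anything, then prune
for table, trigger_id, enabled, db in installation.prunable():
    print(table, trigger_id, enabled, db)
installation.prune()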
151 | """ 152 | for model, trigger in registry.registered(*uris): 153 | LOGGER.info( 154 | "pgtrigger: Disabling %s trigger for %s table on %s database.", 155 | trigger, 156 | model._meta.db_table, 157 | database or DEFAULT_DB_ALIAS, 158 | ) 159 | trigger.disable(model, database=database) 160 | -------------------------------------------------------------------------------- /pgtrigger/management/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AmbitionEng/django-pgtrigger/2ba6f78e7da344e562ecfde0d4ec21eba646a2bf/pgtrigger/management/__init__.py -------------------------------------------------------------------------------- /pgtrigger/management/commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AmbitionEng/django-pgtrigger/2ba6f78e7da344e562ecfde0d4ec21eba646a2bf/pgtrigger/management/commands/__init__.py -------------------------------------------------------------------------------- /pgtrigger/management/commands/pgtrigger.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | import logging 3 | 4 | from django.core.management.base import BaseCommand 5 | from django.db import DEFAULT_DB_ALIAS 6 | 7 | from pgtrigger import core, installation, registry, runtime 8 | 9 | 10 | def _setup_logging(): # pragma: no cover 11 | installation.LOGGER.addHandler(logging.StreamHandler()) 12 | if not installation.LOGGER.level: 13 | installation.LOGGER.setLevel(logging.INFO) 14 | 15 | 16 | class SubCommands(BaseCommand): # pragma: no cover 17 | """ 18 | Subcommand class vendored in from 19 | https://github.com/andrewp-as-is/django-subcommands.py 20 | because of installation issues 21 | """ 22 | 23 | argv = [] 24 | subcommands = {} 25 | 26 | def add_arguments(self, parser): 27 | subparsers = parser.add_subparsers(dest="subcommand", title="subcommands", description="") 28 | subparsers.required = True 29 | 30 | for command_name, command_class in self.subcommands.items(): 31 | command = command_class() 32 | 33 | subparser = subparsers.add_parser(command_name, help=command_class.help) 34 | command.add_arguments(subparser) 35 | prog_name = subcommand = "" 36 | if self.argv: 37 | prog_name = self.argv[0] 38 | subcommand = self.argv[1] 39 | 40 | command_parser = command.create_parser(prog_name, subcommand) 41 | subparser._actions = command_parser._actions 42 | 43 | def run_from_argv(self, argv): 44 | self.argv = argv 45 | return super().run_from_argv(argv) 46 | 47 | def handle(self, *args, **options): 48 | command_name = options["subcommand"] 49 | self.subcommands.get(command_name) 50 | command_class = self.subcommands[command_name] 51 | 52 | if self.argv: 53 | args = [self.argv[0]] + self.argv[2:] 54 | return command_class().run_from_argv(args) 55 | else: 56 | return command_class().execute(*args, **options) 57 | 58 | 59 | class BaseSchemaCommand(BaseCommand): 60 | """Sets the search path based on any "schema" option that's found""" 61 | 62 | def handle(self, *args, **options): 63 | database = options["database"] or DEFAULT_DB_ALIAS 64 | schemas = options["schema"] or [] 65 | 66 | if schemas: 67 | context = runtime.schema(*schemas, databases=[database]) 68 | else: 69 | context = contextlib.nullcontext() 70 | 71 | with context: 72 | return self.handle_with_schema(*args, **options) 73 | 74 | 75 | class LsCommand(BaseSchemaCommand): 76 | help = "List triggers and their installation state." 
77 | 78 | def add_arguments(self, parser): 79 | parser.add_argument("uris", nargs="*", type=str) 80 | parser.add_argument("-d", "--database", help="The database") 81 | parser.add_argument( 82 | "-s", 83 | "--schema", 84 | action="append", 85 | help="Set the search path to this schema", 86 | ) 87 | 88 | def handle_with_schema(self, *args, **options): 89 | uris = options["uris"] 90 | 91 | status_formatted = { 92 | core.UNINSTALLED: "\033[91mUNINSTALLED\033[0m", 93 | core.INSTALLED: "\033[92mINSTALLED\033[0m", 94 | core.OUTDATED: "\033[93mOUTDATED\033[0m", 95 | core.PRUNE: "\033[96mPRUNE\033[0m", 96 | core.UNALLOWED: "\033[94mUNALLOWED\033[0m", 97 | } 98 | 99 | enabled_formatted = { 100 | True: "\033[92mENABLED\033[0m", 101 | False: "\033[91mDISABLED\033[0m", 102 | None: "\033[94mN/A\033[0m", 103 | } 104 | 105 | def _format_status(status, enabled, uri): 106 | if status in (core.UNINSTALLED, core.UNALLOWED): 107 | enabled = None 108 | 109 | return status_formatted[status], enabled_formatted[enabled], uri 110 | 111 | formatted = [] 112 | 113 | for model, trigger in registry.registered(*uris): 114 | uri = trigger.get_uri(model) 115 | status, enabled = trigger.get_installation_status(model, database=options["database"]) 116 | formatted.append(_format_status(status, enabled, uri)) 117 | 118 | if not uris: 119 | for trigger in installation.prunable(database=options["database"]): 120 | formatted.append(_format_status("PRUNE", trigger[2], f"{trigger[0]}:{trigger[1]}")) 121 | 122 | max_status_len = max(len(val) for val, _, _ in formatted) 123 | max_enabled_len = max(len(val) for _, val, _ in formatted) 124 | for status, enabled, uri in formatted: 125 | print( 126 | f"{{: <{max_status_len}}} {{: <{max_enabled_len}}} {{}}".format( 127 | status, enabled, uri 128 | ) 129 | ) 130 | 131 | 132 | class InstallCommand(BaseSchemaCommand): 133 | help = "Install triggers." 134 | 135 | def add_arguments(self, parser): 136 | parser.add_argument("uris", nargs="*", type=str) 137 | parser.add_argument("-d", "--database", help="The database") 138 | parser.add_argument( 139 | "-s", 140 | "--schema", 141 | action="append", 142 | help="Set the search path to this schema", 143 | ) 144 | 145 | def handle_with_schema(self, *args, **options): 146 | _setup_logging() 147 | installation.install(*options["uris"], database=options["database"]) 148 | 149 | 150 | class UninstallCommand(BaseSchemaCommand): 151 | help = "Uninstall triggers." 152 | 153 | def add_arguments(self, parser): 154 | parser.add_argument("uris", nargs="*", type=str) 155 | parser.add_argument("-d", "--database", help="The database") 156 | parser.add_argument( 157 | "-s", 158 | "--schema", 159 | action="append", 160 | help="Set the search path to this schema", 161 | ) 162 | 163 | def handle_with_schema(self, *args, **options): 164 | _setup_logging() 165 | installation.uninstall(*options["uris"], database=options["database"]) 166 | 167 | 168 | class EnableCommand(BaseSchemaCommand): 169 | help = "Enable triggers." 170 | 171 | def add_arguments(self, parser): 172 | parser.add_argument("uris", nargs="*", type=str) 173 | parser.add_argument("-d", "--database", help="The database") 174 | parser.add_argument( 175 | "-s", 176 | "--schema", 177 | action="append", 178 | help="Set the search path to this schema", 179 | ) 180 | 181 | def handle_with_schema(self, *args, **options): 182 | _setup_logging() 183 | installation.enable(*options["uris"], database=options["database"]) 184 | 185 | 186 | class DisableCommand(BaseSchemaCommand): 187 | help = "Disable triggers." 
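The subcommands above are exercised later in test_commands.py through `call_command`, which is also the simplest way to drive them programmatically. A short sketch (the URI refers to one of the test models further down; any registered trigger URI works):

from django.core.management import call_command

call_command("pgtrigger", "ls")  # same as: python manage.py pgtrigger ls

call_command("pgtrigger", "uninstall", "tests.SoftDelete:soft_delete")
call_command("pgtrigger", "install", "tests.SoftDelete:soft_delete")

call_command("pgtrigger", "prune")  # drop installed triggers no longer in the codebase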
188 | 189 | def add_arguments(self, parser): 190 | parser.add_argument("uris", nargs="*", type=str) 191 | parser.add_argument("-d", "--database", help="The database") 192 | parser.add_argument( 193 | "-s", 194 | "--schema", 195 | action="append", 196 | help="Set the search path to this schema", 197 | ) 198 | 199 | def handle_with_schema(self, *args, **options): 200 | _setup_logging() 201 | installation.disable(*options["uris"], database=options["database"]) 202 | 203 | 204 | class PruneCommand(BaseSchemaCommand): 205 | help = "Prune installed triggers that are no longer in the codebase." 206 | 207 | def add_arguments(self, parser): 208 | parser.add_argument("-d", "--database", help="The database") 209 | parser.add_argument( 210 | "-s", 211 | "--schema", 212 | action="append", 213 | help="Set the search path to this schema", 214 | ) 215 | 216 | def handle_with_schema(self, *args, **options): 217 | _setup_logging() 218 | installation.prune(database=options["database"]) 219 | 220 | 221 | class Command(SubCommands): 222 | help = "Core django-pgtrigger subcommands." 223 | 224 | subcommands = { 225 | "ls": LsCommand, 226 | "install": InstallCommand, 227 | "uninstall": UninstallCommand, 228 | "enable": EnableCommand, 229 | "disable": DisableCommand, 230 | "prune": PruneCommand, 231 | } 232 | -------------------------------------------------------------------------------- /pgtrigger/models.py: -------------------------------------------------------------------------------- 1 | """ 2 | pgtrigger needs an empty models.py in order to get post_migrate signals 3 | needed in its app config 4 | """ 5 | -------------------------------------------------------------------------------- /pgtrigger/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AmbitionEng/django-pgtrigger/2ba6f78e7da344e562ecfde0d4ec21eba646a2bf/pgtrigger/py.typed -------------------------------------------------------------------------------- /pgtrigger/registry.py: -------------------------------------------------------------------------------- 1 | import collections 2 | from typing import TYPE_CHECKING, Callable, List, Tuple 3 | 4 | from pgtrigger import features 5 | 6 | _unset = object() 7 | 8 | 9 | if TYPE_CHECKING: 10 | from django.db.models import Model 11 | 12 | from pgtrigger.core import Trigger 13 | 14 | 15 | # All registered triggers for each model 16 | class _Registry(collections.UserDict): 17 | @property 18 | def pg_function_names(self): 19 | """ 20 | The postgres function names of all registered triggers 21 | """ 22 | return {trigger.get_pgid(model) for model, trigger in self.values()} 23 | 24 | @property 25 | def by_db_table(self): 26 | """ 27 | Return the registry keys by db_table, name 28 | """ 29 | return {(model._meta.db_table, trigger.name): trigger for model, trigger in self.values()} 30 | 31 | def __getitem__(self, key): 32 | assert isinstance(key, str) 33 | if len(key.split(":")) == 1: 34 | raise ValueError( 35 | 'Trigger URI must be in the format of "app_label.model_name:trigger_name"' 36 | ) 37 | elif key not in _registry: 38 | raise KeyError(f'URI "{key}" not found in pgtrigger registry') 39 | 40 | return super().__getitem__(key) 41 | 42 | def __setitem__(self, key, value): 43 | assert isinstance(key, str) 44 | model, trigger = value 45 | assert f"{model._meta.label}:{trigger.name}" == key 46 | 47 | found_trigger = self.by_db_table.get((model._meta.db_table, trigger.name)) 48 | 49 | if not found_trigger or found_trigger != trigger: 50 | if 
found_trigger: 51 | raise KeyError( 52 | f'Trigger name "{trigger.name}" already' 53 | f' used for model "{model._meta.label}"' 54 | f' table "{model._meta.db_table}".' 55 | ) 56 | 57 | if trigger.get_pgid(model) in self.pg_function_names: 58 | raise KeyError( 59 | f'Trigger "{trigger.name}" on model "{model._meta.label}"' 60 | " has Postgres function name that's already in use." 61 | " Use a different name for the trigger." 62 | ) 63 | 64 | # Add the trigger to Meta.triggers. 65 | # Note, pgtrigger's App.ready() method auto-registers any 66 | # triggers in Meta already, meaning the trigger may already exist. If so, ignore it 67 | if features.migrations(): # pragma: no branch 68 | if trigger not in getattr(model._meta, "triggers", []): 69 | model._meta.triggers = list(getattr(model._meta, "triggers", [])) + [trigger] 70 | 71 | if trigger not in model._meta.original_attrs.get("triggers", []): 72 | model._meta.original_attrs["triggers"] = list( 73 | model._meta.original_attrs.get("triggers", []) 74 | ) + [trigger] 75 | 76 | return super().__setitem__(key, value) 77 | 78 | def __delitem__(self, key): 79 | model, trigger = self[key] 80 | 81 | super().__delitem__(key) 82 | 83 | # If we support migration integration, remove from Meta triggers 84 | if features.migrations(): # pragma: no branch 85 | model._meta.triggers.remove(trigger) 86 | # If model._meta.triggers and the original_attrs triggers are the same, 87 | # we don't need to remove it from the original_attrs 88 | if trigger in model._meta.original_attrs["triggers"]: # pragma: no branch 89 | model._meta.original_attrs["triggers"].remove(trigger) 90 | 91 | 92 | _registry = _Registry() 93 | 94 | 95 | def set(uri: str, *, model: "Model", trigger: "Trigger") -> None: 96 | """Set a trigger in the registry 97 | 98 | Args: 99 | uri: The trigger URI 100 | model: The trigger model 101 | trigger: The trigger object 102 | """ 103 | _registry[uri] = (model, trigger) 104 | 105 | 106 | def delete(uri: str) -> None: 107 | """Delete a trigger from the registry. 108 | 109 | Args: 110 | uri: The trigger URI 111 | """ 112 | del _registry[uri] 113 | 114 | 115 | def registered(*uris: str) -> List[Tuple["Model", "Trigger"]]: 116 | """ 117 | Get registered trigger objects. 118 | 119 | Args: 120 | *uris: URIs of triggers to get. If none are provided, 121 | all triggers are returned. URIs are in the format of 122 | `{app_label}.{model_name}:{trigger_name}`. 123 | 124 | Returns: 125 | Matching trigger objects. 126 | """ 127 | uris = uris or _registry.keys() 128 | return [_registry[uri] for uri in uris] 129 | 130 | 131 | def register(*triggers: "Trigger") -> Callable: 132 | """ 133 | Register the given triggers with wrapped Model class. 134 | 135 | Args: 136 | *triggers: Trigger classes to register. 
137 | 138 | Example: 139 | Register by decorating a model: 140 | 141 | @pgtrigger.register( 142 | pgtrigger.Protect( 143 | name="append_only", 144 | operation=(pgtrigger.Update | pgtrigger.Delete) 145 | ) 146 | ) 147 | class MyModel(models.Model): 148 | pass 149 | 150 | Example: 151 | Register by calling functionally: 152 | 153 | pgtrigger.register(trigger_object)(MyModel) 154 | """ 155 | 156 | def _model_wrapper(model_class): 157 | for trigger in triggers: 158 | trigger.register(model_class) 159 | 160 | return model_class 161 | 162 | return _model_wrapper 163 | -------------------------------------------------------------------------------- /pgtrigger/tests/__init__.py: -------------------------------------------------------------------------------- 1 | import django 2 | 3 | if django.VERSION < (3, 2): # pragma: no cover 4 | default_app_config = "pgtrigger.tests.apps.PGTriggerTestsConfig" 5 | 6 | del django 7 | -------------------------------------------------------------------------------- /pgtrigger/tests/apps.py: -------------------------------------------------------------------------------- 1 | import django.apps 2 | from django.db import connections 3 | from django.db.models.signals import pre_migrate 4 | 5 | 6 | def install_schemas(using, **kwargs): 7 | if connections[using].vendor == "postgresql": 8 | with connections[using].cursor() as cursor: 9 | cursor.execute('CREATE SCHEMA IF NOT EXISTS "order";') 10 | cursor.execute("CREATE SCHEMA IF NOT EXISTS receipt;") 11 | 12 | 13 | class PGTriggerTestsConfig(django.apps.AppConfig): 14 | name = "pgtrigger.tests" 15 | 16 | def ready(self): 17 | """ 18 | Ensure schemas are created for test databases before migrations 19 | """ 20 | pre_migrate.connect(install_schemas, sender=self) 21 | -------------------------------------------------------------------------------- /pgtrigger/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from django.core.management import call_command 3 | 4 | 5 | @pytest.fixture(scope="session") 6 | def django_db_setup(django_db_setup, django_db_blocker, request): 7 | with django_db_blocker.unblock(): 8 | # Note - schemas for databases are made in the pre-migrate hook 9 | # The django test runner only runs migrations ones per unique connection string. 
10 | # Ensure that we've migrated all of our schema-based databases here 11 | call_command("migrate", database="default", verbosity=request.config.option.verbose) 12 | call_command("migrate", database="order", verbosity=request.config.option.verbose) 13 | call_command("migrate", database="receipt", verbosity=request.config.option.verbose) 14 | 15 | 16 | @pytest.fixture(autouse=True) 17 | def disable_logging(mocker): 18 | mocker.patch("pgtrigger.management.commands.pgtrigger._setup_logging", autospec=True) 19 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 3.0.7 on 2020-06-26 22:02 2 | 3 | import django.db.models.deletion 4 | import django.utils.timezone 5 | from django.conf import settings 6 | from django.db import migrations, models 7 | 8 | 9 | class Migration(migrations.Migration): 10 | initial = True 11 | 12 | dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)] 13 | 14 | operations = [ 15 | migrations.CreateModel( 16 | name="CharPk", 17 | fields=[ 18 | ( 19 | "custom_pk", 20 | models.CharField(max_length=32, primary_key=True, serialize=False), 21 | ) 22 | ], 23 | ), 24 | migrations.CreateModel( 25 | name="SoftDelete", 26 | fields=[ 27 | ( 28 | "id", 29 | models.AutoField( 30 | auto_created=True, 31 | primary_key=True, 32 | serialize=False, 33 | verbose_name="ID", 34 | ), 35 | ), 36 | ("is_active", models.BooleanField(default=True)), 37 | ("other_field", models.TextField()), 38 | ], 39 | ), 40 | migrations.CreateModel( 41 | name="TestTrigger", 42 | fields=[ 43 | ( 44 | "id", 45 | models.AutoField( 46 | auto_created=True, 47 | primary_key=True, 48 | serialize=False, 49 | verbose_name="ID", 50 | ), 51 | ), 52 | ("field", models.CharField(max_length=16)), 53 | ("int_field", models.IntegerField(default=0)), 54 | ( 55 | "dt_field", 56 | models.DateTimeField(default=django.utils.timezone.now), 57 | ), 58 | ( 59 | "nullable", 60 | models.CharField(default=None, max_length=16, null=True), 61 | ), 62 | ( 63 | "char_pk_fk_field", 64 | models.ForeignKey( 65 | null=True, 66 | on_delete=django.db.models.deletion.CASCADE, 67 | to="tests.CharPk", 68 | ), 69 | ), 70 | ( 71 | "fk_field", 72 | models.ForeignKey( 73 | null=True, 74 | on_delete=django.db.models.deletion.CASCADE, 75 | to=settings.AUTH_USER_MODEL, 76 | ), 77 | ), 78 | ], 79 | ), 80 | migrations.CreateModel( 81 | name="TestModel", 82 | fields=[ 83 | ( 84 | "id", 85 | models.AutoField( 86 | auto_created=True, 87 | primary_key=True, 88 | serialize=False, 89 | verbose_name="ID", 90 | ), 91 | ), 92 | ("int_field", models.IntegerField(null=True, unique=True)), 93 | ("char_field", models.CharField(max_length=128, null=True)), 94 | ("float_field", models.FloatField(null=True)), 95 | ], 96 | options={"unique_together": {("int_field", "char_field")}}, 97 | ), 98 | migrations.CreateModel( 99 | name="FkToSoftDelete", 100 | fields=[ 101 | ( 102 | "id", 103 | models.AutoField( 104 | auto_created=True, 105 | primary_key=True, 106 | serialize=False, 107 | verbose_name="ID", 108 | ), 109 | ), 110 | ( 111 | "ref", 112 | models.ForeignKey( 113 | on_delete=django.db.models.deletion.CASCADE, 114 | to="tests.SoftDelete", 115 | ), 116 | ), 117 | ], 118 | ), 119 | ] 120 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/0002_logentry_tologmodel.py: 
-------------------------------------------------------------------------------- 1 | # Generated by Django 3.0.7 on 2020-07-18 07:59 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("tests", "0001_initial"), 9 | ] 10 | 11 | operations = [ 12 | migrations.CreateModel( 13 | name="LogEntry", 14 | fields=[ 15 | ( 16 | "id", 17 | models.AutoField( 18 | auto_created=True, 19 | primary_key=True, 20 | serialize=False, 21 | verbose_name="ID", 22 | ), 23 | ), 24 | ("level", models.CharField(max_length=16)), 25 | ], 26 | ), 27 | migrations.CreateModel( 28 | name="ToLogModel", 29 | fields=[ 30 | ( 31 | "id", 32 | models.AutoField( 33 | auto_created=True, 34 | primary_key=True, 35 | serialize=False, 36 | verbose_name="ID", 37 | ), 38 | ), 39 | ("field", models.CharField(max_length=16)), 40 | ], 41 | ), 42 | ] 43 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/0003_auto_20200718_0938.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 3.0.7 on 2020-07-18 09:38 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("tests", "0002_logentry_tologmodel"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="logentry", 14 | name="new_field", 15 | field=models.CharField(max_length=16, null=True), 16 | ), 17 | migrations.AddField( 18 | model_name="logentry", 19 | name="old_field", 20 | field=models.CharField(max_length=16, null=True), 21 | ), 22 | ] 23 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/0004_fsm.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 3.0.7 on 2020-07-21 19:46 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("tests", "0003_auto_20200718_0938"), 9 | ] 10 | 11 | operations = [ 12 | migrations.CreateModel( 13 | name="FSM", 14 | fields=[ 15 | ( 16 | "id", 17 | models.AutoField( 18 | auto_created=True, 19 | primary_key=True, 20 | serialize=False, 21 | verbose_name="ID", 22 | ), 23 | ), 24 | ("transition", models.CharField(max_length=32)), 25 | ], 26 | ), 27 | ] 28 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/0005_customsoftdelete.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 3.0.7 on 2020-10-13 11:26 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("tests", "0004_fsm"), 9 | ] 10 | 11 | operations = [ 12 | migrations.CreateModel( 13 | name="CustomSoftDelete", 14 | fields=[ 15 | ( 16 | "id", 17 | models.AutoField( 18 | auto_created=True, 19 | primary_key=True, 20 | serialize=False, 21 | verbose_name="ID", 22 | ), 23 | ), 24 | ("custom_active", models.BooleanField(default=True)), 25 | ("other_field", models.TextField()), 26 | ], 27 | ), 28 | ] 29 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/0006_customtablename.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 3.2.3 on 2022-07-30 09:50 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 
7 | dependencies = [ 8 | ("tests", "0005_customsoftdelete"), 9 | ] 10 | 11 | operations = [ 12 | migrations.CreateModel( 13 | name="CustomTableName", 14 | fields=[ 15 | ( 16 | "id", 17 | models.AutoField( 18 | auto_created=True, primary_key=True, serialize=False, verbose_name="ID" 19 | ), 20 | ), 21 | ("int_field", models.IntegerField(null=True, unique=True)), 22 | ], 23 | options={ 24 | "db_table": "order", 25 | }, 26 | ), 27 | ] 28 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/0007_auto_20220808_1055.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 3.2.15 on 2022-08-08 10:55 2 | 3 | from django.conf import settings 4 | from django.db import migrations, models 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | migrations.swappable_dependency(settings.AUTH_USER_MODEL), 10 | ("tests", "0006_customtablename"), 11 | ] 12 | 13 | operations = [ 14 | migrations.CreateModel( 15 | name="TestTriggerProxy", 16 | fields=[], 17 | options={ 18 | "proxy": True, 19 | "indexes": [], 20 | "constraints": [], 21 | }, 22 | bases=("tests.testtrigger",), 23 | ), 24 | migrations.AddField( 25 | model_name="testtrigger", 26 | name="m2m_field", 27 | field=models.ManyToManyField( 28 | related_name="_tests_testtrigger_m2m_field_+", to=settings.AUTH_USER_MODEL 29 | ), 30 | ), 31 | migrations.CreateModel( 32 | name="TestDefaultThrough", 33 | fields=[], 34 | options={ 35 | "proxy": True, 36 | "indexes": [], 37 | "constraints": [], 38 | }, 39 | bases=("tests.testtrigger_m2m_field",), 40 | ), 41 | ] 42 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/0008_searchmodel.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 2.2 on 2022-08-10 19:13 2 | 3 | import django.contrib.postgres.search 4 | from django.db import migrations, models 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("tests", "0007_auto_20220808_1055"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="SearchModel", 15 | fields=[ 16 | ( 17 | "id", 18 | models.AutoField( 19 | auto_created=True, primary_key=True, serialize=False, verbose_name="ID" 20 | ), 21 | ), 22 | ("body_vector", django.contrib.postgres.search.SearchVectorField()), 23 | ("title_body_vector", django.contrib.postgres.search.SearchVectorField()), 24 | ("title", models.CharField(max_length=128)), 25 | ("body", models.TextField()), 26 | ], 27 | ), 28 | ] 29 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/0009_orderschema_receiptschema.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 3.2.15 on 2022-08-11 11:37 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("tests", "0008_searchmodel"), 9 | ] 10 | 11 | operations = [ 12 | migrations.CreateModel( 13 | name="OrderSchema", 14 | fields=[ 15 | ( 16 | "id", 17 | models.AutoField( 18 | auto_created=True, primary_key=True, serialize=False, verbose_name="ID" 19 | ), 20 | ), 21 | ("int_field", models.IntegerField()), 22 | ], 23 | ), 24 | migrations.CreateModel( 25 | name="ReceiptSchema", 26 | fields=[ 27 | ( 28 | "id", 29 | models.AutoField( 30 | auto_created=True, primary_key=True, serialize=False, verbose_name="ID" 31 | ), 32 | ), 33 | 
("char_field", models.CharField(max_length=128)), 34 | ], 35 | options={ 36 | "db_table": "table.with.dots", 37 | }, 38 | ), 39 | ] 40 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/0010_auto_20220817_2211.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 3.2.15 on 2022-08-17 22:11 2 | 3 | import psqlextra.backend.migrations.operations.add_default_partition 4 | import psqlextra.backend.migrations.operations.create_partitioned_model 5 | import psqlextra.manager.manager 6 | import psqlextra.models.partitioned 7 | import psqlextra.types 8 | from django.db import migrations, models 9 | 10 | 11 | class Migration(migrations.Migration): 12 | dependencies = [ 13 | ("tests", "0009_orderschema_receiptschema"), 14 | ] 15 | 16 | operations = [ 17 | psqlextra.backend.migrations.operations.create_partitioned_model.PostgresCreatePartitionedModel( # noqa 18 | name="PartitionModel", 19 | fields=[ 20 | ( 21 | "id", 22 | models.AutoField( 23 | auto_created=True, primary_key=True, serialize=False, verbose_name="ID" 24 | ), 25 | ), 26 | ("name", models.TextField()), 27 | ("timestamp", models.DateTimeField()), 28 | ], 29 | options={ 30 | "abstract": False, 31 | "base_manager_name": "objects", 32 | }, 33 | partitioning_options={ 34 | "method": psqlextra.types.PostgresPartitioningMethod["RANGE"], 35 | "key": ["timestamp"], 36 | }, 37 | bases=(psqlextra.models.partitioned.PostgresPartitionedModel,), 38 | managers=[ 39 | ("objects", psqlextra.manager.manager.PostgresManager()), 40 | ], 41 | ), 42 | psqlextra.backend.migrations.operations.add_default_partition.PostgresAddDefaultPartition( 43 | model_name="PartitionModel", 44 | name="default", 45 | ), 46 | ] 47 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/0011_auto_20220817_2211.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 3.2.15 on 2022-08-17 22:11 2 | 3 | from django.db import migrations 4 | from psqlextra.backend.migrations.operations import PostgresAddRangePartition 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("tests", "0010_auto_20220817_2211"), 10 | ] 11 | 12 | operations = [ 13 | PostgresAddRangePartition( 14 | model_name="partitionmodel", 15 | name="pt1", 16 | from_values="2019-01-01", 17 | to_values="2019-02-01", 18 | ), 19 | PostgresAddRangePartition( 20 | model_name="partitionmodel", 21 | name="pt2", 22 | from_values="2019-02-01", 23 | to_values="2019-03-01", 24 | ), 25 | PostgresAddRangePartition( 26 | model_name="partitionmodel", 27 | name="pt3", 28 | from_values="2019-03-01", 29 | to_values="2019-04-01", 30 | ), 31 | ] 32 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/0012_alter_partitionmodel_options.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 3.2.15 on 2022-08-18 10:48 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("tests", "0011_auto_20220817_2211"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterModelOptions( 13 | name="partitionmodel", 14 | options={}, 15 | ), 16 | ] 17 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/0013_alter_testtrigger_m2m_field_changedcondition.py: 
-------------------------------------------------------------------------------- 1 | # Generated by Django 4.2.6 on 2023-10-11 20:50 2 | 3 | import django.db.models.deletion 4 | from django.conf import settings 5 | from django.db import migrations, models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | dependencies = [ 10 | migrations.swappable_dependency(settings.AUTH_USER_MODEL), 11 | ("tests", "0012_alter_partitionmodel_options"), 12 | ] 13 | 14 | operations = [ 15 | migrations.AlterField( 16 | model_name="testtrigger", 17 | name="m2m_field", 18 | field=models.ManyToManyField(related_name="+", to=settings.AUTH_USER_MODEL), 19 | ), 20 | migrations.CreateModel( 21 | name="ChangedCondition", 22 | fields=[ 23 | ( 24 | "id", 25 | models.AutoField( 26 | auto_created=True, primary_key=True, serialize=False, verbose_name="ID" 27 | ), 28 | ), 29 | ("field", models.CharField(max_length=16)), 30 | ("int_field", models.IntegerField(default=0)), 31 | ("dt_field", models.DateTimeField(auto_now=True)), 32 | ("nullable", models.CharField(default=None, max_length=16, null=True)), 33 | ( 34 | "char_pk_fk_field", 35 | models.ForeignKey( 36 | null=True, on_delete=django.db.models.deletion.CASCADE, to="tests.charpk" 37 | ), 38 | ), 39 | ( 40 | "fk_field", 41 | models.ForeignKey( 42 | null=True, 43 | on_delete=django.db.models.deletion.CASCADE, 44 | to=settings.AUTH_USER_MODEL, 45 | ), 46 | ), 47 | ( 48 | "m2m_field", 49 | models.ManyToManyField(related_name="+", to=settings.AUTH_USER_MODEL), 50 | ), 51 | ], 52 | ), 53 | ] 54 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/0014_softdeletecompositepk.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.2 on 2025-04-18 08:55 2 | 3 | import django 4 | from django.db import migrations, models 5 | 6 | 7 | def get_operations(): 8 | if django.VERSION >= (5, 2): 9 | return [ 10 | migrations.CreateModel( 11 | name="SoftDeleteCompositePk", 12 | fields=[ 13 | ("id_1", models.IntegerField()), 14 | ("id_2", models.IntegerField()), 15 | ( 16 | "pk", 17 | models.CompositePrimaryKey( 18 | "id_1", 19 | "id_2", 20 | blank=True, 21 | editable=False, 22 | primary_key=True, 23 | serialize=False, 24 | ), 25 | ), 26 | ("is_active", models.BooleanField(default=True)), 27 | ("other_field", models.TextField()), 28 | ], 29 | ), 30 | ] 31 | else: # pragma: no cover 32 | return [] 33 | 34 | 35 | class Migration(migrations.Migration): 36 | dependencies = [ 37 | ("tests", "0013_alter_testtrigger_m2m_field_changedcondition"), 38 | ] 39 | 40 | operations = get_operations() 41 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/0015_concretechild_abstractchild.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.2 on 2025-04-25 20:39 2 | 3 | import django.db.models.deletion 4 | from django.conf import settings 5 | from django.db import migrations, models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | dependencies = [ 10 | ("tests", "0014_softdeletecompositepk"), 11 | migrations.swappable_dependency(settings.AUTH_USER_MODEL), 12 | ] 13 | 14 | operations = [ 15 | migrations.CreateModel( 16 | name="ConcreteChild", 17 | fields=[ 18 | ( 19 | "changedcondition_ptr", 20 | models.OneToOneField( 21 | auto_created=True, 22 | on_delete=django.db.models.deletion.CASCADE, 23 | parent_link=True, 24 | primary_key=True, 25 | serialize=False, 26 | 
to="tests.changedcondition", 27 | ), 28 | ), 29 | ("child_field", models.CharField(max_length=16)), 30 | ], 31 | bases=("tests.changedcondition",), 32 | ), 33 | migrations.CreateModel( 34 | name="AbstractChild", 35 | fields=[ 36 | ( 37 | "id", 38 | models.AutoField( 39 | auto_created=True, 40 | primary_key=True, 41 | serialize=False, 42 | verbose_name="ID", 43 | ), 44 | ), 45 | ("field", models.CharField(max_length=16)), 46 | ("int_field", models.IntegerField(default=0)), 47 | ("dt_field", models.DateTimeField(auto_now=True)), 48 | ("nullable", models.CharField(default=None, max_length=16, null=True)), 49 | ("child_field", models.CharField(max_length=16)), 50 | ( 51 | "char_pk_fk_field", 52 | models.ForeignKey( 53 | null=True, 54 | on_delete=django.db.models.deletion.CASCADE, 55 | to="tests.charpk", 56 | ), 57 | ), 58 | ( 59 | "fk_field", 60 | models.ForeignKey( 61 | null=True, 62 | on_delete=django.db.models.deletion.CASCADE, 63 | to=settings.AUTH_USER_MODEL, 64 | ), 65 | ), 66 | ( 67 | "m2m_field", 68 | models.ManyToManyField(related_name="+", to=settings.AUTH_USER_MODEL), 69 | ), 70 | ], 71 | options={ 72 | "abstract": False, 73 | }, 74 | ), 75 | ] 76 | -------------------------------------------------------------------------------- /pgtrigger/tests/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AmbitionEng/django-pgtrigger/2ba6f78e7da344e562ecfde0d4ec21eba646a2bf/pgtrigger/tests/migrations/__init__.py -------------------------------------------------------------------------------- /pgtrigger/tests/models.py: -------------------------------------------------------------------------------- 1 | import django 2 | from django.contrib.auth.models import User 3 | from django.contrib.postgres.search import SearchVectorField 4 | from django.db import connections, models 5 | from django.utils import timezone 6 | from psqlextra.models import PostgresPartitionedModel 7 | from psqlextra.types import PostgresPartitioningMethod 8 | 9 | import pgtrigger 10 | import pgtrigger.utils 11 | 12 | 13 | def _get_pg_maj_version(db): # pragma: no cover 14 | connection = connections[db] 15 | if connection.vendor == "postgresql": 16 | with connection.cursor() as cursor: 17 | return pgtrigger.utils.pg_maj_version(cursor) 18 | 19 | 20 | class Router: 21 | route_app_labels = ["tests"] 22 | 23 | def allow_migrate(self, db, app_label, model_name=None, **hints): 24 | """ 25 | Ignore the parititon model for the "other" DB, for non-Postgres DBs, 26 | and for Postgres DBs that are less than version 13 27 | """ 28 | pg_maj_version = _get_pg_maj_version(db) 29 | 30 | if model_name == "partitionmodel" and ( 31 | db in ("sqlite", "other") or not pg_maj_version or pg_maj_version < 13 32 | ): 33 | return False 34 | 35 | 36 | class PartitionModel(PostgresPartitionedModel): 37 | class PartitioningMeta: 38 | method = PostgresPartitioningMethod.RANGE 39 | key = ["timestamp"] 40 | 41 | name = models.TextField() 42 | timestamp = models.DateTimeField() 43 | 44 | class Meta: 45 | triggers = [pgtrigger.Protect(name="protect_delete", operation=pgtrigger.Delete)] 46 | 47 | 48 | class OrderSchema(models.Model): 49 | """A model that only appears in the "schema1" schema""" 50 | 51 | int_field = models.IntegerField() 52 | 53 | 54 | class ReceiptSchema(models.Model): 55 | """A model that only appears in the "schema2" schema""" 56 | 57 | char_field = models.CharField(max_length=128) 58 | 59 | class Meta: 60 | db_table = "table.with.dots" 61 | 62 | 63 | 
class SearchModel(models.Model): 64 | body_vector = SearchVectorField() 65 | title_body_vector = SearchVectorField() 66 | 67 | title = models.CharField(max_length=128) 68 | body = models.TextField() 69 | 70 | class Meta: 71 | triggers = [ 72 | pgtrigger.UpdateSearchVector( 73 | name="add_body_to_vector", vector_field="body_vector", document_fields=["body"] 74 | ), 75 | pgtrigger.UpdateSearchVector( 76 | name="add_body_title_to_vector", 77 | vector_field="title_body_vector", 78 | document_fields=["body", "title"], 79 | ), 80 | ] 81 | 82 | 83 | @pgtrigger.register( 84 | pgtrigger.Protect(name="protect_delete", operation=pgtrigger.Delete), 85 | ) 86 | class CustomTableName(models.Model): 87 | int_field = models.IntegerField(null=True, unique=True) 88 | 89 | class Meta: 90 | db_table = "order" 91 | 92 | 93 | class TestModel(models.Model): 94 | int_field = models.IntegerField(null=True, unique=True) 95 | char_field = models.CharField(max_length=128, null=True) 96 | float_field = models.FloatField(null=True) 97 | 98 | class Meta: 99 | unique_together = ("int_field", "char_field") 100 | 101 | 102 | class LogEntry(models.Model): 103 | """Created when ToLogModel is updated""" 104 | 105 | level = models.CharField(max_length=16) 106 | old_field = models.CharField(max_length=16, null=True) 107 | new_field = models.CharField(max_length=16, null=True) 108 | 109 | 110 | class ToLogModel(models.Model): 111 | """For testing triggers that log records at statement and row level""" 112 | 113 | field = models.CharField(max_length=16) 114 | 115 | class Meta: 116 | triggers = [ 117 | pgtrigger.Trigger( 118 | name="update_of_statement_test", 119 | level=pgtrigger.Statement, 120 | operation=pgtrigger.UpdateOf("field"), 121 | when=pgtrigger.After, 122 | func=pgtrigger.Func( 123 | f""" 124 | INSERT INTO {LogEntry._meta.db_table}(level) 125 | VALUES ('STATEMENT'); 126 | RETURN NULL; 127 | """ 128 | ), 129 | ), 130 | pgtrigger.Trigger( 131 | name="after_update_statement_test", 132 | level=pgtrigger.Statement, 133 | operation=pgtrigger.Update, 134 | when=pgtrigger.After, 135 | referencing=pgtrigger.Referencing(old="old_values", new="new_values"), 136 | func=f""" 137 | INSERT INTO {LogEntry._meta.db_table}(level, old_field, new_field) 138 | SELECT 'STATEMENT' AS level, 139 | old_values.field AS old_field, 140 | new_values.field AS new_field 141 | FROM old_values 142 | JOIN new_values ON old_values.id = new_values.id; 143 | RETURN NULL; 144 | """, 145 | ), 146 | pgtrigger.Trigger( 147 | name="after_update_row_test", 148 | level=pgtrigger.Row, 149 | operation=pgtrigger.Update, 150 | when=pgtrigger.After, 151 | condition=pgtrigger.Q(old__field__df=pgtrigger.F("new__field")), 152 | func=( 153 | f"INSERT INTO {LogEntry._meta.db_table}(level) VALUES ('ROW'); RETURN NULL;" 154 | ), 155 | ), 156 | ] 157 | 158 | 159 | class CharPk(models.Model): 160 | custom_pk = models.CharField(primary_key=True, max_length=32) 161 | 162 | 163 | class TestTrigger(models.Model): 164 | """ 165 | For testing triggers 166 | """ 167 | 168 | field = models.CharField(max_length=16) 169 | int_field = models.IntegerField(default=0) 170 | dt_field = models.DateTimeField(default=timezone.now) 171 | nullable = models.CharField(null=True, default=None, max_length=16) 172 | fk_field = models.ForeignKey("auth.User", null=True, on_delete=models.CASCADE) 173 | char_pk_fk_field = models.ForeignKey(CharPk, null=True, on_delete=models.CASCADE) 174 | m2m_field = models.ManyToManyField(User, related_name="+") 175 | 176 | class Meta: 177 | triggers = [ 178 | 
pgtrigger.Trigger( 179 | name="protect_misc_insert", 180 | when=pgtrigger.Before, 181 | operation=pgtrigger.Insert, 182 | func="RAISE EXCEPTION 'no no no!';", 183 | condition=pgtrigger.Q(new__field="misc_insert"), 184 | ), 185 | ] 186 | 187 | 188 | class TestTriggerProxy(TestTrigger): 189 | """ 190 | For testing triggers on proxy models 191 | """ 192 | 193 | class Meta: 194 | proxy = True 195 | triggers = [ 196 | pgtrigger.Protect(name="protect_delete", operation=pgtrigger.Delete), 197 | ] 198 | 199 | 200 | class TestDefaultThrough(TestTrigger.m2m_field.through): 201 | class Meta: 202 | proxy = True 203 | triggers = [ 204 | pgtrigger.Protect(name="protect_it", operation=pgtrigger.Delete), 205 | ] 206 | 207 | 208 | @pgtrigger.register(pgtrigger.SoftDelete(name="soft_delete", field="is_active")) 209 | class SoftDelete(models.Model): 210 | """ 211 | For testing soft deletion. Deletions on this model will set 212 | is_active = False without deleting the model 213 | """ 214 | 215 | is_active = models.BooleanField(default=True) 216 | other_field = models.TextField() 217 | 218 | 219 | class FkToSoftDelete(models.Model): 220 | """Ensures foreign keys to a soft delete model are deleted""" 221 | 222 | ref = models.ForeignKey(SoftDelete, on_delete=models.CASCADE) 223 | 224 | 225 | if django.VERSION >= (5, 2): 226 | 227 | @pgtrigger.register(pgtrigger.SoftDelete(name="soft_delete_composite_pk", field="is_active")) 228 | class SoftDeleteCompositePk(models.Model): 229 | """ 230 | For testing soft deletion with a composite primary key. 231 | """ 232 | 233 | id_1 = models.IntegerField() 234 | id_2 = models.IntegerField() 235 | pk = models.CompositePrimaryKey("id_1", "id_2") 236 | is_active = models.BooleanField(default=True) 237 | other_field = models.TextField() 238 | 239 | 240 | @pgtrigger.register(pgtrigger.SoftDelete(name="soft_delete", field="custom_active")) 241 | class CustomSoftDelete(models.Model): 242 | """ 243 | For testing soft deletion with a custom active field. 244 | 245 | This trigger also helps ensure that triggers can have the same names 246 | across multiple models. 
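The soft-delete models above make the trigger's effect easy to verify interactively. A sketch of what a delete does once the trigger is installed (the field value is arbitrary):

from pgtrigger.tests.models import SoftDelete

row = SoftDelete.objects.create(other_field="example")
pk = row.pk

row.delete()  # the trigger sets is_active = False instead of removing the row

assert SoftDelete.objects.get(pk=pk).is_active is False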
247 | """ 248 | 249 | custom_active = models.BooleanField(default=True) 250 | other_field = models.TextField() 251 | 252 | 253 | @pgtrigger.register( 254 | pgtrigger.FSM( 255 | name="fsm", 256 | field="transition", 257 | transitions=[("unpublished", "published"), ("published", "inactive")], 258 | ) 259 | ) 260 | class FSM(models.Model): 261 | """Tests valid transitions of a field""" 262 | 263 | transition = models.CharField(max_length=32) 264 | 265 | 266 | class ChangedCondition(models.Model): 267 | """ 268 | For testing changed conditions 269 | """ 270 | 271 | field = models.CharField(max_length=16) 272 | int_field = models.IntegerField(default=0) 273 | dt_field = models.DateTimeField(auto_now=True) 274 | nullable = models.CharField(null=True, default=None, max_length=16) 275 | fk_field = models.ForeignKey("auth.User", null=True, on_delete=models.CASCADE) 276 | char_pk_fk_field = models.ForeignKey(CharPk, null=True, on_delete=models.CASCADE) 277 | m2m_field = models.ManyToManyField(User, related_name="+") 278 | 279 | 280 | class ConcreteChild(ChangedCondition): 281 | child_field = models.CharField(max_length=16) 282 | 283 | 284 | class AbstractChangedCondition(models.Model): 285 | """ 286 | For testing changed conditions 287 | """ 288 | 289 | field = models.CharField(max_length=16) 290 | int_field = models.IntegerField(default=0) 291 | dt_field = models.DateTimeField(auto_now=True) 292 | nullable = models.CharField(null=True, default=None, max_length=16) 293 | fk_field = models.ForeignKey("auth.User", null=True, on_delete=models.CASCADE) 294 | char_pk_fk_field = models.ForeignKey(CharPk, null=True, on_delete=models.CASCADE) 295 | m2m_field = models.ManyToManyField(User, related_name="+") 296 | 297 | class Meta: 298 | abstract = True 299 | 300 | 301 | class AbstractChild(AbstractChangedCondition): 302 | child_field = models.CharField(max_length=16) 303 | -------------------------------------------------------------------------------- /pgtrigger/tests/syncdb_app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AmbitionEng/django-pgtrigger/2ba6f78e7da344e562ecfde0d4ec21eba646a2bf/pgtrigger/tests/syncdb_app/__init__.py -------------------------------------------------------------------------------- /pgtrigger/tests/syncdb_app/apps.py: -------------------------------------------------------------------------------- 1 | import django.apps 2 | 3 | 4 | class PGTriggerTestsSyncdbAppConfig(django.apps.AppConfig): 5 | name = "pgtrigger.tests.syncdb_app" 6 | -------------------------------------------------------------------------------- /pgtrigger/tests/syncdb_app/models.py: -------------------------------------------------------------------------------- 1 | from django.apps.registry import Apps 2 | from django.db import models 3 | from django.utils import timezone 4 | 5 | import pgtrigger 6 | 7 | syncdb_apps = Apps() 8 | 9 | 10 | class NoMigrationModel(models.Model): 11 | """ 12 | For testing triggers installed with syncdb 13 | """ 14 | 15 | field = models.CharField(max_length=16) 16 | int_field = models.IntegerField(default=0) 17 | dt_field = models.DateTimeField(default=timezone.now) 18 | nullable = models.CharField(null=True, default=None, max_length=16) 19 | 20 | class Meta: 21 | apps = syncdb_apps 22 | triggers = [ 23 | pgtrigger.Trigger( 24 | name="protect_misc_insert", 25 | when=pgtrigger.Before, 26 | operation=pgtrigger.Insert, 27 | func="RAISE EXCEPTION 'no no no!';", 28 | 
condition=pgtrigger.Q(new__field="misc_insert"), 29 | ), 30 | ] 31 | -------------------------------------------------------------------------------- /pgtrigger/tests/test_commands.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | from unittest import mock 3 | 4 | from django.core.management import call_command 5 | import pytest 6 | 7 | import pgtrigger 8 | from pgtrigger import registry 9 | 10 | 11 | @pytest.mark.django_db 12 | def test_full_ls(capsys): 13 | """Tests listing all triggers""" 14 | 15 | call_command("pgtrigger", "ls") 16 | 17 | lines = capsys.readouterr().out.split("\n") 18 | expected_lines = [ 19 | "", 20 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.CustomSoftDelete:soft_delete", 21 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.CustomTableName:protect_delete", 22 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.FSM:fsm", 23 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SearchModel:add_body_title_to_vector", 24 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SearchModel:add_body_to_vector", 25 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", 26 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestDefaultThrough:protect_it", 27 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTrigger:protect_misc_insert", 28 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", 29 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:after_update_row_test", 30 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:after_update_statement_test", 31 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:update_of_statement_test", 32 | ] 33 | assert set(expected_lines).issubset(set(lines)) 34 | 35 | 36 | @pytest.mark.django_db 37 | def test_subset_ls(capsys): 38 | """Tests listing some triggers""" 39 | 40 | call_command( 41 | "pgtrigger", 42 | "ls", 43 | "tests.SoftDelete:soft_delete", 44 | "tests.TestTriggerProxy:protect_delete", 45 | ) 46 | 47 | lines = capsys.readouterr().out.split("\n") 48 | assert set(lines) == set( 49 | [ 50 | "", 51 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", 52 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", 53 | ] 54 | ) 55 | 56 | 57 | @pytest.mark.django_db 58 | def test_main_commands(capsys): 59 | """ 60 | Tests running main commands 61 | """ 62 | 63 | call_command("pgtrigger", "uninstall") 64 | call_command( 65 | "pgtrigger", 66 | "ls", 67 | "tests.SoftDelete:soft_delete", 68 | "tests.TestTriggerProxy:protect_delete", 69 | ) 70 | 71 | lines = capsys.readouterr().out.split("\n") 72 | assert set(lines) == set( 73 | [ 74 | "", 75 | "\x1b[91mUNINSTALLED\x1b[0m \x1b[94mN/A\x1b[0m tests.SoftDelete:soft_delete", 76 | "\x1b[91mUNINSTALLED\x1b[0m \x1b[94mN/A\x1b[0m tests.TestTriggerProxy:protect_delete", 77 | ] 78 | ) 79 | 80 | call_command("pgtrigger", "install") 81 | call_command( 82 | "pgtrigger", 83 | "ls", 84 | "tests.SoftDelete:soft_delete", 85 | "tests.TestTriggerProxy:protect_delete", 86 | ) 87 | 88 | lines = capsys.readouterr().out.split("\n") 89 | assert set(lines) == set( 90 | [ 91 | "", 92 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", 93 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", 94 | ] 95 | ) 96 | 97 | call_command("pgtrigger", "disable") 98 | call_command( 99 | 
"pgtrigger", 100 | "ls", 101 | "tests.SoftDelete:soft_delete", 102 | "tests.TestTriggerProxy:protect_delete", 103 | ) 104 | 105 | lines = capsys.readouterr().out.split("\n") 106 | assert set(lines) == set( 107 | [ 108 | "", 109 | "\x1b[92mINSTALLED\x1b[0m \x1b[91mDISABLED\x1b[0m tests.SoftDelete:soft_delete", 110 | "\x1b[92mINSTALLED\x1b[0m \x1b[91mDISABLED\x1b[0m tests.TestTriggerProxy:protect_delete", 111 | ] 112 | ) 113 | 114 | call_command("pgtrigger", "enable") 115 | call_command( 116 | "pgtrigger", 117 | "ls", 118 | "tests.SoftDelete:soft_delete", 119 | "tests.TestTriggerProxy:protect_delete", 120 | ) 121 | 122 | lines = capsys.readouterr().out.split("\n") 123 | assert set(lines) == set( 124 | [ 125 | "", 126 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", 127 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", 128 | ] 129 | ) 130 | 131 | 132 | @pytest.mark.django_db 133 | def test_prune(capsys): 134 | """Test pruning a trigger""" 135 | # Make it appear as though the trigger has been renamed and is no 136 | # longer installed 137 | soft_delete_model, soft_delete_trigger = pgtrigger.registered("tests.SoftDelete:soft_delete")[ 138 | 0 139 | ] 140 | with soft_delete_trigger.unregister(soft_delete_model): 141 | call_command("pgtrigger", "ls") 142 | captured = capsys.readouterr() 143 | lines = sorted(captured.out.split("\n")) 144 | assert ( 145 | "\x1b[96mPRUNE\x1b[0m \x1b[92mENABLED\x1b[0m tests_softdelete:pgtrigger_soft_delete_f41be" 146 | ) in lines 147 | 148 | call_command("pgtrigger", "prune") 149 | 150 | call_command("pgtrigger", "ls") 151 | lines = capsys.readouterr().out.split("\n") 152 | assert ( 153 | "\x1b[91mUNINSTALLED\x1b[0m \x1b[94mN/A\x1b[0m tests.SoftDelete:soft_delete" 154 | ) in lines 155 | 156 | call_command("pgtrigger", "install") 157 | call_command("pgtrigger", "ls") 158 | lines = capsys.readouterr().out.split("\n") 159 | assert ( 160 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete" 161 | ) in lines 162 | 163 | 164 | @pytest.mark.django_db(databases=["default", "other"]) 165 | def test_outdated(capsys, mocker): 166 | """Test an outdated trigger""" 167 | # Make it appear like the trigger is out of date by changing 168 | # its hash 169 | mocker.patch.object( 170 | registry._registry["tests.SoftDelete:soft_delete"][1], 171 | "compile", 172 | return_value=mocker.Mock(hash="hash"), 173 | ) 174 | 175 | call_command("pgtrigger", "ls") 176 | lines = capsys.readouterr().out.split("\n") 177 | assert ( 178 | "\x1b[93mOUTDATED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete" 179 | ) in lines 180 | 181 | 182 | @pytest.mark.django_db 183 | def test_main_commands_w_args(capsys): 184 | """ 185 | Tests running main commands with arguments 186 | """ 187 | 188 | call_command("pgtrigger", "uninstall", "tests.SoftDelete:soft_delete") 189 | call_command( 190 | "pgtrigger", 191 | "ls", 192 | "tests.SoftDelete:soft_delete", 193 | "tests.TestTriggerProxy:protect_delete", 194 | ) 195 | 196 | lines = capsys.readouterr().out.split("\n") 197 | assert set(lines) == set( 198 | [ 199 | "\x1b[91mUNINSTALLED\x1b[0m \x1b[94mN/A\x1b[0m tests.SoftDelete:soft_delete", 200 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", 201 | "", 202 | ] 203 | ) 204 | 205 | call_command("pgtrigger", "install", "tests.SoftDelete:soft_delete") 206 | call_command( 207 | "pgtrigger", 208 | "ls", 209 | "tests.SoftDelete:soft_delete", 210 | 
"tests.TestTriggerProxy:protect_delete", 211 | ) 212 | 213 | lines = capsys.readouterr().out.split("\n") 214 | assert set(lines) == set( 215 | [ 216 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", 217 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", 218 | "", 219 | ] 220 | ) 221 | 222 | call_command("pgtrigger", "disable", "tests.SoftDelete:soft_delete") 223 | call_command( 224 | "pgtrigger", 225 | "ls", 226 | "tests.SoftDelete:soft_delete", 227 | "tests.TestTriggerProxy:protect_delete", 228 | ) 229 | 230 | lines = capsys.readouterr().out.split("\n") 231 | assert set(lines) == set( 232 | [ 233 | "\x1b[92mINSTALLED\x1b[0m \x1b[91mDISABLED\x1b[0m tests.SoftDelete:soft_delete", 234 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", 235 | "", 236 | ] 237 | ) 238 | 239 | call_command("pgtrigger", "enable", "tests.SoftDelete:soft_delete") 240 | call_command( 241 | "pgtrigger", 242 | "ls", 243 | "tests.SoftDelete:soft_delete", 244 | "tests.TestTriggerProxy:protect_delete", 245 | ) 246 | 247 | lines = capsys.readouterr().out.split("\n") 248 | assert set(lines) == set( 249 | [ 250 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", 251 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", 252 | "", 253 | ] 254 | ) 255 | -------------------------------------------------------------------------------- /pgtrigger/tests/test_multi_db.py: -------------------------------------------------------------------------------- 1 | """Tests multi-database support""" 2 | 3 | import contextlib 4 | 5 | import ddf 6 | import pytest 7 | from django.contrib.auth.models import User 8 | from django.core.management import call_command 9 | from django.db import transaction 10 | 11 | import pgtrigger 12 | from pgtrigger import core 13 | from pgtrigger.tests import models, utils 14 | 15 | 16 | class ToLogRouter: 17 | """ 18 | Route the "ToLog" model to the "other" database 19 | """ 20 | 21 | route_app_labels = {"auth", "contenttypes"} 22 | 23 | def db_for_write(self, model, **hints): 24 | if model == models.ToLogModel: 25 | return "other" 26 | 27 | return None 28 | 29 | 30 | @pytest.fixture(autouse=True) 31 | def routed_db(settings): 32 | settings.DATABASE_ROUTERS = [ 33 | "pgtrigger.tests.test_multi_db.ToLogRouter", 34 | "pgtrigger.tests.models.Router", 35 | ] 36 | 37 | 38 | @pytest.mark.django_db(databases=["default", "sqlite", "other"], transaction=True) 39 | def test_multi_db_ignore(): 40 | """Tests ignoring triggers across multiple databases""" 41 | trigger = pgtrigger.Protect(operation=pgtrigger.Delete, name="protect_deletes") 42 | 43 | with contextlib.ExitStack() as stack: 44 | stack.enter_context(trigger.register(models.ToLogModel)) 45 | stack.enter_context(trigger.register(User)) 46 | stack.enter_context(trigger.install(models.ToLogModel, database="other")) 47 | stack.enter_context(trigger.install(User)) 48 | 49 | with utils.raises_trigger_error(match="Cannot delete", database="other"): 50 | log = ddf.G(models.ToLogModel) 51 | log.delete() 52 | 53 | with utils.raises_trigger_error(match="Cannot delete"): 54 | user = ddf.G(User) 55 | user.delete() 56 | 57 | with transaction.atomic(): 58 | with pgtrigger.ignore("tests.ToLogModel:protect_deletes", "auth.User:protect_deletes"): 59 | log = models.ToLogModel.objects.create() 60 | log.delete() 61 | user = ddf.G(User) 62 | user.delete() 63 | 64 | with utils.raises_trigger_error(match="Cannot 
delete"): 65 | user = User.objects.create(username="hi") 66 | user.delete() 67 | 68 | with utils.raises_trigger_error(match="Cannot delete", database="other"): 69 | log = models.ToLogModel.objects.create() 70 | log.delete() 71 | 72 | 73 | @pytest.mark.django_db(databases=["default", "sqlite", "other"]) 74 | def test_full_ls(capsys): 75 | call_command("pgtrigger", "ls") 76 | captured = capsys.readouterr() 77 | lines = [line for line in captured.out.split("\n") if line] 78 | for line in lines: 79 | assert "\x1b[92mINSTALLED\x1b[0m" in line 80 | 81 | call_command("pgtrigger", "ls", "-d", "other") 82 | captured = capsys.readouterr() 83 | lines = [line for line in captured.out.split("\n") if line] 84 | for line in lines: 85 | # The router ignores partition models for the default DB 86 | if "tests.PartitionModel:protect_delete" in line: 87 | assert "\x1b[94mUNALLOWED\x1b[0m" in line 88 | else: 89 | assert "\x1b[92mINSTALLED\x1b[0m" in line 90 | 91 | call_command("pgtrigger", "ls", "-d", "sqlite") 92 | captured = capsys.readouterr() 93 | lines = [line for line in captured.out.split("\n") if line] 94 | for line in lines: 95 | assert "\x1b[94mUNALLOWED\x1b[0m" in line 96 | 97 | 98 | @pytest.mark.django_db(databases=["other"]) 99 | def test_disable_enable(capsys): 100 | call_command("pgtrigger", "disable", "-d", "other") 101 | for model, trigger in pgtrigger.registered(): 102 | expected_status = None if model == models.PartitionModel else False 103 | assert trigger.get_installation_status(model, database="other")[1] is expected_status 104 | 105 | call_command("pgtrigger", "enable", "--database", "other") 106 | for model, trigger in pgtrigger.registered(): 107 | expected_status = None if model == models.PartitionModel else True 108 | assert trigger.get_installation_status(model, database="other")[1] is expected_status 109 | 110 | 111 | @pytest.mark.django_db(databases=["sqlite"]) 112 | def test_ignore_non_postgres_dbs(): 113 | call_command("pgtrigger", "uninstall", "-d", "sqlite") 114 | call_command("pgtrigger", "install", "-d", "sqlite") 115 | call_command("pgtrigger", "install", "-d", "sqlite") 116 | call_command("pgtrigger", "prune", "-d", "sqlite") 117 | 118 | 119 | @pytest.mark.django_db(databases=["other", "default", "sqlite"]) 120 | def test_uninstall_install(): 121 | for model, trigger in pgtrigger.registered(): 122 | expected_status = core.UNALLOWED if model == models.PartitionModel else core.INSTALLED 123 | assert trigger.get_installation_status(model, database="other")[0] == expected_status 124 | 125 | call_command("pgtrigger", "uninstall", "-d", "other") 126 | call_command("pgtrigger", "uninstall", "-d", "default") 127 | for model, trigger in pgtrigger.registered(): 128 | expected_status = core.UNALLOWED if model == models.PartitionModel else core.UNINSTALLED 129 | assert trigger.get_installation_status(model, database="other")[0] == expected_status 130 | 131 | call_command("pgtrigger", "install", "--database", "other") 132 | for model, trigger in pgtrigger.registered(): 133 | expecetd_status = core.UNALLOWED if model == models.PartitionModel else core.INSTALLED 134 | assert trigger.get_installation_status(model, database="other")[0] == expecetd_status 135 | 136 | for model, trigger in pgtrigger.registered(): 137 | assert trigger.get_installation_status(model, database="default")[0] == core.UNINSTALLED 138 | -------------------------------------------------------------------------------- /pgtrigger/tests/test_multi_schema.py: 
-------------------------------------------------------------------------------- 1 | """Tests multi-schema support""" 2 | 3 | # flake8: noqa 4 | 5 | import contextlib 6 | 7 | import ddf 8 | from django.core.management import call_command 9 | import pytest 10 | 11 | import pgtrigger 12 | from pgtrigger.tests import models, utils 13 | 14 | 15 | class SchemaRouter: 16 | """ 17 | A router to control tables that should be migrated to different schemas 18 | """ 19 | 20 | def db_for_read(self, model, **hints): 21 | if model == models.OrderSchema: 22 | return "order" 23 | elif model == models.ReceiptSchema: # pragma: no branch 24 | return "receipt" 25 | 26 | def db_for_write(self, model, **hints): 27 | return self.db_for_read(model, **hints) 28 | 29 | def allow_migrate(self, db, app_label, model_name=None, **hints): 30 | if model_name == "orderschema": 31 | return db == "order" 32 | elif model_name == "receiptschema": 33 | return db == "receipt" 34 | 35 | 36 | @pytest.fixture(autouse=True) 37 | def routed_db(settings): 38 | settings.DATABASE_ROUTERS = [ 39 | "pgtrigger.tests.test_multi_schema.SchemaRouter", 40 | "pgtrigger.tests.models.Router", 41 | ] 42 | 43 | 44 | @pytest.fixture(autouse=True) 45 | def schema_triggers(): 46 | protect_deletes = pgtrigger.Protect(name="protect_deletes", operation=pgtrigger.Delete) 47 | protect_updates = pgtrigger.Protect(name="protect_updates", operation=pgtrigger.Update) 48 | 49 | with contextlib.ExitStack() as stack: 50 | stack.enter_context(protect_deletes.register(models.OrderSchema)) 51 | stack.enter_context(protect_updates.register(models.ReceiptSchema)) 52 | 53 | yield 54 | 55 | 56 | @pytest.mark.django_db(databases=["order", "receipt"], transaction=True) 57 | def test_multi_schema_triggers_work(): 58 | """Verify the triggers in the schema_triggers fixture work""" 59 | call_command("pgtrigger", "install", "-d", "order") 60 | call_command("pgtrigger", "install", "-d", "receipt") 61 | 62 | order = ddf.G("tests.OrderSchema") 63 | receipt = ddf.G("tests.ReceiptSchema") 64 | 65 | with utils.raises_trigger_error(match="Cannot delete", database="order"): 66 | order.delete() 67 | 68 | with utils.raises_trigger_error(match="Cannot update", database="receipt"): 69 | receipt.char_field = "hello" 70 | receipt.save() 71 | 72 | receipt.delete() 73 | 74 | order = ddf.G("tests.OrderSchema") 75 | with pgtrigger.ignore("tests.OrderSchema:protect_deletes"): 76 | order.delete() 77 | 78 | 79 | @pytest.mark.django_db(databases=["order", "receipt", "default", "other"], transaction=True) 80 | def test_commands(capsys): 81 | """Verify commands work""" 82 | call_command("pgtrigger", "ls") 83 | lines = capsys.readouterr().out.split("\n") 84 | expected_lines = [ 85 | "", 86 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.CustomSoftDelete:soft_delete", 87 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.CustomTableName:protect_delete", 88 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.FSM:fsm", 89 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SearchModel:add_body_title_to_vector", 90 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SearchModel:add_body_to_vector", 91 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", 92 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestDefaultThrough:protect_it", 93 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTrigger:protect_misc_insert", 94 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", 95 |
"\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:after_update_row_test", 96 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:after_update_statement_test", 97 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:update_of_statement_test", 98 | "\x1b[94mUNALLOWED\x1b[0m \x1b[94mN/A\x1b[0m tests.OrderSchema:protect_deletes", 99 | "\x1b[94mUNALLOWED\x1b[0m \x1b[94mN/A\x1b[0m tests.ReceiptSchema:protect_updates", 100 | ] 101 | assert set(expected_lines).issubset(set(lines)) 102 | 103 | call_command("pgtrigger", "ls", "-d", "receipt") 104 | lines = capsys.readouterr().out.split("\n") 105 | expected_lines = [ 106 | "", 107 | "\x1b[91mUNINSTALLED\x1b[0m \x1b[94mN/A\x1b[0m tests.ReceiptSchema:protect_updates", 108 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.CustomSoftDelete:soft_delete", 109 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.CustomTableName:protect_delete", 110 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.FSM:fsm", 111 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SearchModel:add_body_title_to_vector", 112 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SearchModel:add_body_to_vector", 113 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", 114 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestDefaultThrough:protect_it", 115 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTrigger:protect_misc_insert", 116 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", 117 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:after_update_row_test", 118 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:after_update_statement_test", 119 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:update_of_statement_test", 120 | "\x1b[94mUNALLOWED\x1b[0m \x1b[94mN/A\x1b[0m tests.OrderSchema:protect_deletes", 121 | ] 122 | assert set(expected_lines).issubset(set(lines)) 123 | 124 | call_command("pgtrigger", "install", "-d", "receipt") 125 | call_command("pgtrigger", "ls", "-d", "receipt") 126 | lines = capsys.readouterr().out.split("\n") 127 | expected_lines = [ 128 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ReceiptSchema:protect_updates", 129 | ] 130 | assert set(expected_lines).issubset(set(lines)) 131 | 132 | # Installed a trigger to be pruned. 
133 | protect_inserts = pgtrigger.Protect(name="protect_inserts", operation=pgtrigger.Insert) 134 | protect_inserts.install(models.OrderSchema, database="order") 135 | 136 | call_command("pgtrigger", "ls", "-d", "order") 137 | lines = capsys.readouterr().out.split("\n") 138 | expected_lines = [ 139 | "\x1b[91mUNINSTALLED\x1b[0m \x1b[94mN/A\x1b[0m tests.OrderSchema:protect_deletes", 140 | "\x1b[94mUNALLOWED\x1b[0m \x1b[94mN/A\x1b[0m tests.ReceiptSchema:protect_updates", 141 | "\x1b[96mPRUNE\x1b[0m \x1b[92mENABLED\x1b[0m tests_orderschema:pgtrigger_protect_inserts_a0767", 142 | ] 143 | assert set(expected_lines).issubset(set(lines)) 144 | 145 | call_command("pgtrigger", "prune", "-d", "order") 146 | call_command("pgtrigger", "install", "-d", "order") 147 | call_command("pgtrigger", "ls", "-d", "order") 148 | lines = capsys.readouterr().out.split("\n") 149 | for line in lines: 150 | assert "PRUNE" not in line 151 | expected_lines = [ 152 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.OrderSchema:protect_deletes", 153 | "\x1b[94mUNALLOWED\x1b[0m \x1b[94mN/A\x1b[0m tests.ReceiptSchema:protect_updates", 154 | ] 155 | assert set(expected_lines).issubset(set(lines)) 156 | 157 | # Set the search path to a schema and check results 158 | call_command("pgtrigger", "uninstall", "-s", "receipt") 159 | call_command("pgtrigger", "ls", "tests.CustomSoftDelete:soft_delete", "-s", "receipt") 160 | lines = capsys.readouterr().out.split("\n") 161 | expected_lines = [ 162 | "", 163 | "\x1b[91mUNINSTALLED\x1b[0m \x1b[94mN/A\x1b[0m tests.CustomSoftDelete:soft_delete", 164 | ] 165 | assert set(expected_lines) == set(lines) 166 | 167 | call_command("pgtrigger", "install", "tests.CustomSoftDelete:soft_delete", "-s", "receipt") 168 | call_command("pgtrigger", "ls", "tests.CustomSoftDelete:soft_delete", "-s", "receipt") 169 | lines = capsys.readouterr().out.split("\n") 170 | expected_lines = [ 171 | "", 172 | "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.CustomSoftDelete:soft_delete", 173 | ] 174 | assert set(expected_lines) == set(lines) 175 | -------------------------------------------------------------------------------- /pgtrigger/tests/test_registry.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import pgtrigger 4 | from pgtrigger import registry 5 | from pgtrigger.tests import models 6 | 7 | 8 | def test_registered_invalid_args(): 9 | with pytest.raises(ValueError): 10 | pgtrigger.registered("uri") 11 | 12 | 13 | def test_registry(): 14 | """ 15 | Tests dynamically registering and unregistering triggers 16 | """ 17 | init_registry_size = len(registry._registry) 18 | # The trigger registry should already be populated with our test triggers 19 | assert init_registry_size >= 6 20 | 21 | # Add a trigger to the registry 22 | trigger = pgtrigger.Trigger( 23 | when=pgtrigger.Before, 24 | name="my_aliased_trigger", 25 | operation=pgtrigger.Insert | pgtrigger.Update, 26 | func="RAISE EXCEPTION 'no no no!';", 27 | ) 28 | 29 | # Register/unregister in context managers. 
The state should be the same 30 | # at the end as the beginning 31 | with trigger.register(models.TestModel): 32 | assert len(registry._registry) == init_registry_size + 1 33 | assert f"tests.TestModel:{trigger.name}" in registry._registry 34 | 35 | with trigger.unregister(models.TestModel): 36 | assert len(registry._registry) == init_registry_size 37 | assert f"tests.TestModel:{trigger.name}" not in registry._registry 38 | 39 | # Try obtaining trigger by alias 40 | assert pgtrigger.registered("tests.TestModel:my_aliased_trigger") 41 | 42 | assert len(registry._registry) == init_registry_size 43 | assert f"tests.TestModel:{trigger.name}" not in registry._registry 44 | with pytest.raises(KeyError, match="not found"): 45 | pgtrigger.registered(f"tests.TestModel:{trigger.name}") 46 | 47 | with pytest.raises(ValueError, match="must be in the format"): 48 | pgtrigger.registered("tests.TestMode") 49 | 50 | 51 | def test_duplicate_trigger_names(mocker): 52 | """Ensure that duplicate trigger names are properly detected""" 53 | 54 | # Add a trigger to the registry 55 | trigger1 = pgtrigger.Trigger( 56 | name="mytrigger", when=pgtrigger.Before, operation=pgtrigger.Insert 57 | ) 58 | trigger2 = pgtrigger.Protect( 59 | name="mytrigger", when=pgtrigger.Before, operation=pgtrigger.Insert 60 | ) 61 | trigger3 = pgtrigger.Trigger( 62 | name="MyTrigger", when=pgtrigger.Before, operation=pgtrigger.Insert 63 | ) 64 | 65 | assert trigger1.get_pgid(models.TestModel) == "pgtrigger_mytrigger_b34c5" 66 | assert trigger3.get_pgid(models.TestModel) == "pgtrigger_mytrigger_4a08f" 67 | 68 | # Check that a conflict cannot happen in the registry. 69 | # NOTE - use context managers to ensure we don't keep around 70 | # these registered triggers in other tests 71 | with trigger1.register(models.TestModel): 72 | with pytest.raises(KeyError, match="already used"): 73 | with trigger2.register(models.TestModel): 74 | pass 75 | 76 | mocker.patch.object(pgtrigger.Trigger, "get_pgid", return_value="duplicate") 77 | 78 | # Check that a conflict cannot happen in the generated postgres ID. 79 | # NOTE - use context managers to ensure we don't keep around 80 | # these registered triggers in other tests 81 | with pytest.raises(KeyError, match="already in use"): 82 | with trigger1.register(models.TestModel): 83 | pass 84 | 85 | 86 | def test_duplicate_trigger_names_proxy_model(mocker): 87 | """Test that duplicate trigger names are detected when using proxy models""" 88 | 89 | # TestTriggerProxy registers "protect_delete" for TestTrigger. 90 | # If we try to register this trigger directly on TestTrigger, it should result 91 | # in a duplicate error 92 | trigger = pgtrigger.Trigger( 93 | name="protect_delete", when=pgtrigger.Before, operation=pgtrigger.Insert 94 | ) 95 | with pytest.raises(KeyError, match="already used"): 96 | with trigger.register(models.TestTrigger): 97 | pass 98 | -------------------------------------------------------------------------------- /pgtrigger/tests/test_syncdb.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from django.db import connection 3 | 4 | import pgtrigger.tests.syncdb_app.models as syncdb_models 5 | from pgtrigger.tests import utils 6 | 7 | 8 | @pytest.mark.django_db 9 | def test_create_model_creates_triggers(): 10 | """ 11 | Tests trigger installation with syncdb 12 | 13 | `DatabaseSchemaEditorMixin.create_model` is called when the django app doesn't 14 | have a migrations module. 
`DatabaseSchemaEditorMixin.create_model` is also called 15 | during a `CreateTable` migration operation but as the triggers aren't stored with 16 | the `CreateTable` operation, the specific code that creates triggers in 17 | `DatabaseSchemaEditorMixin.create_model` isn't executed. 18 | """ 19 | with connection.schema_editor() as editor: 20 | editor.create_model(syncdb_models.NoMigrationModel) 21 | 22 | with utils.raises_trigger_error(match="no no no!"): 23 | syncdb_models.NoMigrationModel.objects.create(field="misc_insert", int_field=1) 24 | -------------------------------------------------------------------------------- /pgtrigger/tests/utils.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | 3 | import pytest 4 | from django.db import DEFAULT_DB_ALIAS, connections 5 | from django.db import transaction as db_transaction 6 | from django.db.utils import DatabaseError 7 | 8 | 9 | @contextlib.contextmanager 10 | def raises_trigger_error(match=None, database=DEFAULT_DB_ALIAS, transaction=None): 11 | with contextlib.ExitStack() as stack: 12 | stack.enter_context(pytest.raises(DatabaseError, match=match)) 13 | 14 | if transaction is None: 15 | transaction = connections[database].in_atomic_block 16 | 17 | if transaction: 18 | stack.enter_context(db_transaction.atomic(using=database)) 19 | 20 | yield 21 | -------------------------------------------------------------------------------- /pgtrigger/utils.py: -------------------------------------------------------------------------------- 1 | from django.conf import settings 2 | from django.core.exceptions import ImproperlyConfigured 3 | from django.db import DEFAULT_DB_ALIAS, connections 4 | from django.utils.version import get_version_tuple 5 | 6 | 7 | def _psycopg_version(): 8 | try: 9 | import psycopg as Database 10 | except ImportError: 11 | import psycopg2 as Database 12 | except Exception as exc: # pragma: no cover 13 | raise ImproperlyConfigured("Error loading psycopg2 or psycopg module") from exc 14 | 15 | version_tuple = get_version_tuple(Database.__version__.split(" ", 1)[0]) 16 | 17 | if version_tuple[0] not in (2, 3): # pragma: no cover 18 | raise ImproperlyConfigured(f"Psycopg version {version_tuple[0]} not supported") 19 | 20 | return version_tuple 21 | 22 | 23 | psycopg_version = _psycopg_version() 24 | psycopg_maj_version = psycopg_version[0] 25 | 26 | 27 | class AttrDict(dict): 28 | """A dictionary where keys can be accessed as attributes""" 29 | 30 | def __init__(self, *args, **kwargs): 31 | super().__init__(*args, **kwargs) 32 | self.__dict__ = self 33 | 34 | 35 | def connection(database=None): 36 | """ 37 | Obtains the connection used for a trigger / model pair. The database 38 | for the connection is selected based on the write DB in the database 39 | router config. 40 | """ 41 | return connections[database or DEFAULT_DB_ALIAS] 42 | 43 | 44 | def pg_maj_version(cursor): 45 | """Return the major version of Postgres that's running""" 46 | version = getattr(cursor.connection, "server_version", cursor.connection.info.server_version) 47 | return int(str(version)[:-4]) 48 | 49 | 50 | def is_postgres(database): 51 | return connection(database).vendor == "postgresql" 52 | 53 | 54 | def postgres_databases(databases=None): 55 | """Return postgres databases from the provided list of databases.
56 | 57 | If no databases are provided, return all postgres databases 58 | """ 59 | databases = databases or list(settings.DATABASES) 60 | assert isinstance(databases, list) 61 | return [database for database in databases if is_postgres(database)] 62 | 63 | 64 | def exec_sql(sql, database=None, fetchall=False): 65 | if is_postgres(database): # pragma: no branch 66 | with connection(database).cursor() as cursor: 67 | cursor.execute(sql) 68 | 69 | if fetchall: 70 | return cursor.fetchall() 71 | 72 | 73 | def quote(label, char='"'): 74 | """Conditionally wraps a label in quotes""" 75 | if label.startswith(char) or label.endswith(char): 76 | return label 77 | else: 78 | return f"{char}{label}{char}" 79 | 80 | 81 | def render_uninstall(table, trigger_pgid): 82 | """Renders uninstallation SQL""" 83 | return f"DROP TRIGGER IF EXISTS {trigger_pgid} ON {quote(table)};" 84 | -------------------------------------------------------------------------------- /pgtrigger/version.py: -------------------------------------------------------------------------------- 1 | from importlib import metadata 2 | 3 | __version__ = metadata.version("django-pgtrigger") 4 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["poetry_core>=1.9.0"] 3 | build-backend = "poetry.core.masonry.api" 4 | 5 | [tool.coverage.run] 6 | branch = true 7 | source = ["pgtrigger"] 8 | 9 | [tool.coverage.report] 10 | exclude_lines = [ 11 | "pragma: no cover", 12 | "raise AssertionError", 13 | "raise NotImplementedError", 14 | "pass", 15 | "pytest.mark.skip", 16 | "@(typing\\.)?overload", 17 | "if TYPE_CHECKING:", 18 | ] 19 | show_missing = true 20 | fail_under = 100 21 | omit = ["pgtrigger/tests/migrations/*"] 22 | 23 | [tool.poetry] 24 | name = "django-pgtrigger" 25 | packages = [ 26 | { include = "pgtrigger" } 27 | ] 28 | exclude = [ 29 | "*/tests/" 30 | ] 31 | version = "4.15.2" 32 | description = "Postgres trigger support integrated with Django models." 
33 | authors = ["Wes Kendall"] 34 | classifiers = [ 35 | "Intended Audience :: Developers", 36 | "Operating System :: OS Independent", 37 | "Programming Language :: Python", 38 | "Programming Language :: Python :: 3.9", 39 | "Programming Language :: Python :: 3.10", 40 | "Programming Language :: Python :: 3.11", 41 | "Programming Language :: Python :: 3.12", 42 | "Programming Language :: Python :: 3.13", 43 | "Programming Language :: Python :: 3 :: Only", 44 | "Framework :: Django", 45 | "Framework :: Django :: 4.2", 46 | "Framework :: Django :: 5.0", 47 | "Framework :: Django :: 5.1", 48 | "Framework :: Django :: 5.2", 49 | ] 50 | license = "BSD-3-Clause" 51 | readme = "README.md" 52 | homepage = "https://github.com/AmbitionEng/django-pgtrigger" 53 | repository = "https://github.com/AmbitionEng/django-pgtrigger" 54 | documentation = "https://django-pgtrigger.readthedocs.io" 55 | 56 | [tool.poetry.dependencies] 57 | python = ">=3.9.0,<4" 58 | django = ">=4" 59 | 60 | [tool.poetry.dev-dependencies] 61 | pytest = "8.3.5" 62 | pytest-cov = "6.1.1" 63 | pytest-dotenv = "0.5.2" 64 | pytest-mock = "3.14.0" 65 | pytest-order = "1.1.0" 66 | django-postgres-extra = "2.0.4" 67 | tox = "4.25.0" 68 | ruff = "0.11.6" 69 | pyright = "1.1.399" 70 | mkdocs = "1.6.1" 71 | black = "25.1.0" 72 | mkdocs-material = "9.6.12" 73 | mkdocstrings-python = "1.16.10" 74 | footing = "*" 75 | setuptools = "*" 76 | poetry-core = "1.9.1" 77 | cleo = "2.1.0" 78 | poetry-plugin-export = "1.8.0" 79 | typing-extensions = "4.13.2" 80 | django-stubs = "5.1.3" 81 | dj-database-url = "2.3.0" 82 | psycopg2-binary = "2.9.10" 83 | pytest-django = "4.11.1" 84 | django-dynamic-fixture = "4.0.1" 85 | django-pgbulk = "3.2.2" 86 | 87 | [tool.pytest.ini_options] 88 | xfail_strict = true 89 | addopts = "--reuse-db -m 'not independent'" 90 | testpaths = "pgtrigger/tests" 91 | norecursedirs = ".venv" 92 | DJANGO_SETTINGS_MODULE = "test_settings" 93 | markers = [ 94 | "independent: marks tests that should run independently of normal suite" 95 | ] 96 | 97 | [tool.ruff] 98 | lint.select = ["E", "F", "B", "I", "G", "C4"] 99 | line-length = 99 100 | target-version = "py39" 101 | 102 | [tool.pyright] 103 | exclude = [ 104 | "**/node_modules", 105 | "**/__pycache__", 106 | "src/experimental", 107 | "src/typestubs", 108 | "**/migrations/**", 109 | "**/tests/**", 110 | ] 111 | pythonVersion = "3.9" 112 | typeCheckingMode = "standard" 113 | -------------------------------------------------------------------------------- /settings.py: -------------------------------------------------------------------------------- 1 | import copy 2 | 3 | import dj_database_url 4 | 5 | SECRET_KEY = "django-pgtrigger" 6 | # Install the tests as an app so that we can make test models 7 | INSTALLED_APPS = [ 8 | "pgtrigger", 9 | # For testing purposes 10 | "django.contrib.auth", 11 | "django.contrib.contenttypes", 12 | "django.contrib.postgres", 13 | "psqlextra", 14 | "pgtrigger.tests", 15 | "pgtrigger.tests.syncdb_app", 16 | ] 17 | 18 | # Database url comes from the DATABASE_URL env var 19 | # We have some multi-database and multi-schema tests 20 | DATABASES = { 21 | "default": dj_database_url.config(), 22 | "sqlite": {"ENGINE": "django.db.backends.sqlite3", "NAME": "test_sqlite"}, 23 | } 24 | 25 | DATABASES["other"] = copy.deepcopy(DATABASES["default"]) 26 | DATABASES["other"]["NAME"] += "_other" 27 | 28 | DATABASES["default"]["ENGINE"] = "psqlextra.backend" 29 | 30 | DATABASES["order"] = copy.deepcopy(DATABASES["default"]) 31 | DATABASES["order"]["OPTIONS"] = 
{"options": "-c search_path=order"} 32 | DATABASES["receipt"] = copy.deepcopy(DATABASES["default"]) 33 | DATABASES["receipt"]["OPTIONS"] = {"options": "-c search_path=receipt"} 34 | 35 | DEFAULT_AUTO_FIELD = "django.db.models.AutoField" 36 | 37 | # Ensure partitioned models dont get migrated for non-default DBs 38 | DATABASE_ROUTERS = ["pgtrigger.tests.models.Router"] 39 | 40 | # Turn off pgtrigger migrations for normal manage.py use 41 | PGTRIGGER_MIGRATIONS = False 42 | 43 | # Ensure that we always install triggers if running locally 44 | PGTRIGGER_INSTALL_ON_MIGRATE = True 45 | 46 | USE_TZ = False 47 | -------------------------------------------------------------------------------- /test_settings.py: -------------------------------------------------------------------------------- 1 | from settings import * 2 | 3 | # Turn on pgtrigger migrations in the test suite 4 | PGTRIGGER_MIGRATIONS = True 5 | 6 | # We turn this on in tests to ensure that triggers are installed 7 | # when the test database is set up. We dynamically turn it off 8 | # when testing migrations. 9 | PGTRIGGER_INSTALL_ON_MIGRATE = True 10 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | isolated_build = true 3 | envlist = 4 | py{39,310,311,312,313}-django42-psycopg2 5 | py313-django42-psycopg3 6 | py{310,311,312,313}-django50-psycopg2 7 | py313-django50-psycopg3 8 | py{310,311,312,313}-django51-psycopg2 9 | py313-django51-psycopg3 10 | py{310,311,312,313}-django52-psycopg2 11 | py313-django52-psycopg3 12 | report 13 | 14 | [testenv] 15 | allowlist_externals = 16 | poetry 17 | bash 18 | grep 19 | skip_install = true 20 | passenv = 21 | DATABASE_URL 22 | PYTHONDONTWRITEBYTECODE 23 | install_command = pip install {opts} --no-compile {packages} 24 | deps = 25 | django42: Django>=4.2,<4.3 26 | django50: Django>=5.0,<5.1 27 | django51: Django>=5.1,<5.2 28 | django52: Django>=5.2,<5.3 29 | psycopg2: psycopg2-binary 30 | psycopg3: psycopg[binary] 31 | commands = 32 | bash -c 'poetry export --with dev --without-hashes -f requirements.txt | grep -v "^[dD]jango==" | grep -v "^psycopg2-binary==" | pip install --no-compile -q --no-deps -r /dev/stdin' 33 | pip install --no-compile -q --no-deps --no-build-isolation -e . 34 | pytest --create-db --cov --cov-fail-under=0 --cov-append --cov-config pyproject.toml {posargs} 35 | # There are some tests that must run independently of the original test suite because of making 36 | # dynamic models 37 | pytest --create-db --cov --cov-fail-under=0 --cov-append --cov-config pyproject.toml -m independent {posargs} 38 | 39 | [testenv:report] 40 | allowlist_externals = 41 | coverage 42 | skip_install = true 43 | depends = py{39,310,311,312,313}-django42-psycopg2, py313-django42-psycopg3, py{310,311,312,313}-django50-psycopg2, py313-django50-psycopg3, py{310,311,312,313}-django51-psycopg2, py313-django51-psycopg3, py{310,311,312,313}-django52-psycopg2, py313-django52-psycopg3 44 | parallel_show_output = true 45 | commands = 46 | coverage report --fail-under 100 47 | coverage erase 48 | --------------------------------------------------------------------------------