├── .github
└── workflows
│ ├── ci.yml
│ ├── coverage.yml
│ ├── docs.yml
│ └── markdownlint.yml
├── .gitignore
├── Dockerfile
├── LICENSE
├── Makefile
├── README.md
├── docs-theme
└── main.html
├── docs
├── classification.md
├── index.md
├── rules.md
└── tags.md
├── migration_lint
├── __init__.py
├── analyzer
│ ├── __init__.py
│ ├── base.py
│ ├── compat.py
│ └── squawk.py
├── django
│ ├── __init__.py
│ ├── apps.py
│ ├── extractor
│ │ ├── __init__.py
│ │ └── django_management.py
│ └── management
│ │ ├── __init__.py
│ │ └── commands
│ │ ├── __init__.py
│ │ └── migration_lint.py
├── extractor
│ ├── __init__.py
│ ├── alembic.py
│ ├── base.py
│ ├── django.py
│ ├── flyway.py
│ ├── model.py
│ └── raw_sql.py
├── main.py
├── source_loader
│ ├── __init__.py
│ ├── base.py
│ ├── gitlab.py
│ ├── local.py
│ └── model.py
├── sql
│ ├── __init__.py
│ ├── constants.py
│ ├── model.py
│ ├── operations.py
│ ├── parser.py
│ └── rules.py
└── util
│ ├── __init__.py
│ ├── colors.py
│ └── env.py
├── mkdocs.yml
├── poetry.lock
├── pre_build.py
├── pyproject.toml
├── style.rb
└── tests
├── __init__.py
├── test_alembic_extractor.py
├── test_analyzer.py
├── test_classify_statement.py
├── test_django_extractor.py
├── test_django_management_extractor.py
├── test_flyway_extractor.py
├── test_gitlab_loader.py
├── test_main.py
└── test_squawk_linter.py
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | pull_request:
8 | branches:
9 | - main
10 |
11 | jobs:
12 | test:
13 | runs-on: ubuntu-latest
14 | strategy:
15 | matrix:
16 | python-version: ["3.9", "3.10", "3.11"]
17 | steps:
18 | - name: Checkout code
19 | uses: actions/checkout@v3
20 |
21 | - name: Set up Python
22 | uses: actions/setup-python@v4
23 | with:
24 | python-version: ${{ matrix.python-version }}
25 |
26 | - name: Install dependencies
27 | run: |
28 | make install
29 |
30 | - name: Run tests
31 | run: |
32 | make test
33 |
34 | lint-python:
35 | runs-on: ubuntu-latest
36 | steps:
37 | - name: Checkout code
38 | uses: actions/checkout@v3
39 |
40 | - name: Set up Python
41 | uses: actions/setup-python@v4
42 | with:
43 | python-version: '3.9'
44 |
45 | - name: Install dependencies
46 | run: |
47 | make install
48 |
49 | - name: Run linters
50 | run: |
51 | make lint-python
52 |
53 | lint-markdown:
54 | runs-on: ubuntu-latest
55 | steps:
56 | - name: Checkout code
57 | uses: actions/checkout@v3
58 |
59 | - name: Run markdownlint
60 | run: make lint-markdown
--------------------------------------------------------------------------------
/.github/workflows/coverage.yml:
--------------------------------------------------------------------------------
1 | name: Publish Coverage
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 |
8 | jobs:
9 | publish-coverage:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Checkout code
13 | uses: actions/checkout@v3
14 |
15 | - name: Set up Python
16 | uses: actions/setup-python@v4
17 | with:
18 | python-version: '3.9'
19 |
20 | - name: Install dependencies
21 | run: |
22 | make install
23 |
24 | - name: Run tests
25 | run: |
26 | make test
27 |
28 | - name: Coverage Badge
29 | run: |
30 | pip install coverage-badge "coverage[toml]"
31 | coverage-badge -o coverage.svg
32 |
33 | - name: Push badge to orphan branch
34 | run: |
35 | git config --global user.name "github-actions[bot]"
36 | git config --global user.email "github-actions[bot]@users.noreply.github.com"
37 | git checkout --orphan coverage-badge
38 | git reset --hard
39 | git add coverage.svg
40 | git commit -m "Update coverage badge"
41 | git push --force origin coverage-badge
42 |
--------------------------------------------------------------------------------
/.github/workflows/docs.yml:
--------------------------------------------------------------------------------
1 | name: Publish Docs
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 |
8 | jobs:
9 | publish-docs:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Checkout code
13 | uses: actions/checkout@v3
14 |
15 | - name: Set up Python
16 | uses: actions/setup-python@v4
17 | with:
18 | python-version: '3.9'
19 |
20 | - run: pip install mkdocs-material
21 | - run: mkdocs gh-deploy --force
--------------------------------------------------------------------------------
/.github/workflows/markdownlint.yml:
--------------------------------------------------------------------------------
1 | name: Markdown-lint
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | pull_request:
8 | branches:
9 | - main
10 |
11 | jobs:
12 | lint-markdown:
13 | runs-on: ubuntu-latest
14 | steps:
15 | - name: Checkout code
16 | uses: actions/checkout@v3
17 |
18 | - name: Run markdownlint
19 | run: make lint-markdown
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 | junit.xml
54 |
55 | # Translations
56 | *.mo
57 | *.pot
58 |
59 | # Django stuff:
60 | *.log
61 | local_settings.py
62 | db.sqlite3
63 | db.sqlite3-journal
64 |
65 | # Flask stuff:
66 | instance/
67 | .webassets-cache
68 |
69 | # Scrapy stuff:
70 | .scrapy
71 |
72 | # Sphinx documentation
73 | docs/_build/
74 |
75 | # PyBuilder
76 | .pybuilder/
77 | target/
78 |
79 | # Jupyter Notebook
80 | .ipynb_checkpoints
81 |
82 | # IPython
83 | profile_default/
84 | ipython_config.py
85 |
86 | # pyenv
87 | # For a library or package, you might want to ignore these files since the code is
88 | # intended to run in multiple environments; otherwise, check them in:
89 | # .python-version
90 |
91 | # pipenv
92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
95 | # install all needed dependencies.
96 | #Pipfile.lock
97 |
98 | # poetry
99 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
100 | # This is especially recommended for binary packages to ensure reproducibility, and is more
101 | # commonly ignored for libraries.
102 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
103 | #poetry.lock
104 |
105 | # pdm
106 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
107 | #pdm.lock
108 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
109 | # in version control.
110 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
111 | .pdm.toml
112 | .pdm-python
113 | .pdm-build/
114 |
115 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
116 | __pypackages__/
117 |
118 | # Celery stuff
119 | celerybeat-schedule
120 | celerybeat.pid
121 |
122 | # SageMath parsed files
123 | *.sage.py
124 |
125 | # Environments
126 | .env
127 | .venv
128 | env/
129 | venv/
130 | ENV/
131 | env.bak/
132 | venv.bak/
133 |
134 | # Spyder project settings
135 | .spyderproject
136 | .spyproject
137 |
138 | # Rope project settings
139 | .ropeproject
140 |
141 | # mkdocs documentation
142 | /site
143 |
144 | # mypy
145 | .mypy_cache/
146 | .dmypy.json
147 | dmypy.json
148 |
149 | # Pyre type checker
150 | .pyre/
151 |
152 | # pytype static type analyzer
153 | .pytype/
154 |
155 | # Cython debug symbols
156 | cython_debug/
157 |
158 | # PyCharm
159 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
160 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
161 | # and can be added to the global gitignore or merged into this file. For a more nuclear
162 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
163 | .idea/
164 |
165 | migration_lint/bin
166 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim
2 |
3 | ARG version
4 |
5 | WORKDIR /app/
6 |
7 | RUN apt-get update -y \
8 | && apt-get install --no-install-recommends -y git curl \
9 | && rm -rf /var/lib/apt/lists/*
10 |
11 | RUN pip install migration-lint==$version
12 |
13 | ENTRYPOINT ["migration-lint"]
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 PandaDoc
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .EXPORT_ALL_VARIABLES:
2 |
3 | COMPOSE_FILE ?= docker/docker-compose-local.yml
4 | COMPOSE_PROJECT_NAME ?= migration-lint
5 |
6 | DOTENV_BASE_FILE ?= .env-local
7 | DOTENV_CUSTOM_FILE ?= .env-custom
8 |
9 | POETRY_EXPORT_OUTPUT = requirements.txt
10 | POETRY_EXTRAS = --extras "git" --extras "django"
11 | POETRY_GROUPS = --with "dev,test"
12 | POETRY_PREINSTALLED ?= false
13 | POETRY_PUBLISH_PRERELEASE ?= false
14 | POETRY_VERSION = 1.2.2
15 | POETRY ?= $(HOME)/.local/bin/poetry
16 |
17 | PYTHON_INSTALL_PACKAGES_USING ?= poetry
18 | PYTHONPATH := $(PYTHONPATH):$(CURDIR)/proto/
19 |
20 | WHEELHOUSE_HOME ?= .wheelhouse
21 |
22 | -include $(DOTENV_BASE_FILE)
23 | -include $(DOTENV_CUSTOM_FILE)
24 |
25 | .PHONY: install-poetry
26 | install-poetry:
27 | ifeq ($(POETRY_PREINSTALLED), true)
28 | $(POETRY) self update $(POETRY_VERSION)
29 | else
30 | curl -sSL https://install.python-poetry.org | python -
31 | endif
32 |
33 | .PHONY: install-packages
34 | install-packages:
35 | ifeq ($(PYTHON_INSTALL_PACKAGES_USING), poetry)
36 | $(POETRY) install -vv $(POETRY_EXTRAS) $(POETRY_GROUPS) $(opts)
37 | else
38 | $(POETRY) run pip install \
39 | --no-index \
40 | --find-links=$(WHEELHOUSE_HOME) \
41 | --requirement=$(POETRY_EXPORT_OUTPUT)
42 | endif
43 |
44 | .PHONY: install
45 | install: install-poetry install-packages
46 |
47 | .PHONY: export-packages
48 | export-packages:
49 | $(POETRY) export \
50 | $(POETRY_EXTRAS) \
51 | $(POETRY_GROUPS) \
52 | --without-hashes \
53 | --without-urls \
54 | --output=$(POETRY_EXPORT_OUTPUT)
55 |
56 | .PHONY: prepare-wheels
57 | prepare-wheels: lock-packages export-packages
58 | @$(POETRY) run pip wheel \
59 | --wheel-dir=$(WHEELHOUSE_HOME) \
60 | --find-links=$(WHEELHOUSE_HOME) \
61 | --requirement=$(POETRY_EXPORT_OUTPUT)
62 |
63 | .PHONY: lock-packages
64 | lock-packages:
65 | $(POETRY) lock -vv --no-update
66 |
67 | .PHONY: update-packages
68 | update-packages:
69 | $(POETRY) update -vv
70 |
71 | .PHONY: lint-mypy
72 | lint-mypy:
73 | $(POETRY) run mypy migration_lint
74 |
75 | .PHONY: lint-ruff
76 | ifdef CI
77 | lint-ruff: export RUFF_NO_CACHE=true
78 | endif
79 | lint-ruff:
80 | $(POETRY) run ruff check .
81 | $(POETRY) run ruff format --check --diff .
82 |
83 | .PHONY: lint-python
84 | lint-python: lint-mypy lint-ruff
85 |
86 | .PHONY: lint-markdown
87 | lint-markdown:
88 | docker run \
89 | --rm \
90 | --interactive \
91 | --read-only \
92 | --volume=`pwd`:`pwd` \
93 | --workdir=`pwd` \
94 | --entrypoint='' \
95 | registry.gitlab.com/pipeline-components/markdownlint:0.11.3 \
96 | mdl --style style.rb .
97 |
98 | .PHONY: lint
99 | lint: lint-python lint-markdown
100 |
101 | .PHONY: fmt-ruff
102 | fmt-ruff:
103 | $(POETRY) run ruff check . --fix
104 | $(POETRY) run ruff format .
105 |
106 | .PHONY: fmt
107 | fmt: fmt-ruff
108 |
109 | .PHONY: test
110 | test:
111 | $(POETRY) run pytest --cov=migration_lint $(opts) $(call tests,.)
112 |
113 | .PHONY: docker-up
114 | docker-up:
115 | docker compose up --remove-orphans -d
116 | docker compose ps
117 |
118 | .PHONY: docker-down
119 | docker-down:
120 | docker compose down
121 |
122 | .PHONY: docker-logs
123 | docker-logs:
124 | docker compose logs --follow
125 |
126 | .PHONY: docker-ps
127 | docker-ps:
128 | docker compose ps
129 |
130 | .PHONY: publish
131 | publish:
132 | $(POETRY) publish \
133 | --no-interaction \
134 | --build
135 |
136 | #.PHONY: release
137 | #release:
138 | # scripts/release.sh
139 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Migration Lint
2 |
3 | 
4 | 
5 | 
6 |
7 | `migration-lint` is a modular linter tool designed
8 | to perform checks on database schema migrations
9 | and prevent unsafe operations.
10 |
11 | Features:
12 |
13 | - Works with [Django migrations](https://docs.djangoproject.com/en/5.1/topics/migrations/),
14 | [Alembic](https://alembic.sqlalchemy.org/en/latest/) and raw sql files.
15 | - Easily extensible for other frameworks.
16 | - Can identify Backward Incompatible operations
17 | and check if they are allowed in the current context.
18 | - Can identify "unsafe" operations, e.g. operations that acquire locks
19 | that can be dangerous for production database.
20 |
21 | ## Installation
22 |
23 | ```shell linenums="0"
24 | poetry add "migration-lint"
25 | ```
26 |
27 | ```shell linenums="0"
28 | pip install "migration-lint"
29 | ```
30 |
31 | ## Documentation
32 |
33 | Read the docs on [GitHub Pages](https://pandadoc.github.io/migration-lint/)
--------------------------------------------------------------------------------
/docs-theme/main.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block outdated %}
4 | You're not viewing the latest version.
5 |
6 | Click here to go to latest.
7 |
8 | {% endblock %}
9 |
10 |
--------------------------------------------------------------------------------
/docs/classification.md:
--------------------------------------------------------------------------------
1 | # DB migrations classification
2 |
3 | ## Context
4 |
5 | This DB migration classification is intended to be
6 | a basis for the unified DB migrations process.
7 |
8 | ### Migrations Classification
9 |
10 | ### Common Multi-Stage Migrations Pattern
11 |
12 | All stages are optional.
13 |
14 | | Name | Description | Autorun? |
15 | |--------|------------------------------------------------------------------|----------------|
16 | | stage1 | Backward-compatible schema migration + (optional) code migration | safe |
17 | | stage2 | Backfilling data migration | * not safe on prod, safe on stagings |
18 | | stage3 | Code update that is a preparation for backward-incompatible schema migration | |
19 | | stage4 | Backward-incompatible schema migration | * no on prod |
20 |
21 | According to this pattern, we distinguish the following types of migrations:
22 |
23 | | Name | Stages |
24 | |-----------------------------------------|-----------------------|
25 | | Data migration | * stage2 |
26 | | Backward-compatible migration | * stage1 |
27 | | Backward-incompatible migration | * stage3, stage4 |
28 | | Backward-incompatible migration requiring data backfilling | * stage1, stage2, stage3 (optional), stage4 |
29 |
30 | ### Consequences
31 |
32 | * Backward-compatible schema changes can be combined with the corresponding code
33 | updates (including the code required for backfilling data migrations).
34 | * Code updates required to prepare for backward-incompatible changes
35 | must be a separate deployment.
36 | * Backward-incompatible schema changes must be a separate deployment.
37 |
38 | ### Locks
39 |
40 | | Name | Allowed DQL/DMS Commands | Conflicting DQL/DML Commands |
41 | |-----------------------|-------------------------|------------------------------|
42 | | AccessExclusiveLock | | SELECT, INSERT, UPDATE, DELETE |
43 | | ShareRowExclusiveLock | SELECT | INSERT, UPDATE, DELETE |
44 |
45 | ### Migrations
46 |
47 | ### Notes
48 |
49 | * We try to make all migrations idempotent.
50 | * We note locks if they are important.
51 |
52 | ## Index Operations
53 |
54 | ### Create Index
55 |
56 | Backward-compatible migration
57 |
58 | * **stage1**
59 |   * `CREATE INDEX CONCURRENTLY IF NOT EXISTS ....`
60 |   * `REINDEX INDEX CONCURRENTLY ` (if not valid)
61 |   * Update code to use the new index (optional, if the index is used in code).
62 |
63 | ### Drop Index
64 |
65 | Backward-incompatible migration
66 |
67 | * **stage3**: Update code to not use an index that will be deleted (optional,
68 | if the index is used in code).
69 | * **stage4**: `DROP INDEX CONCURRENTLY IF EXISTS `.
70 |
71 | > **Note**: `DROP INDEX CONCURRENTLY` cannot be used to drop any index
72 | > that supports a constraint. See Drop primary key and Drop UNIQUE constraint.
73 |
74 | ### Rename Index
75 |
76 | Backward-compatible migration
77 |
78 | * **stage1**: `ALTER INDEX IF EXISTS ... RENAME TO ....`
79 |
80 | ### Reindex
81 |
82 | Backward-compatible migration
83 |
84 | * **stage1**: `REINDEX INDEX CONCURRENTLY ....`
85 |
86 | ---
87 |
88 | ## Sequence Operations
89 |
90 | ### Create Sequence
91 |
92 | Backward-compatible migration
93 |
94 | * **stage1**:
95 | * `CREATE SEQUENCE ....`
96 | * Update code to use the new sequence
97 | (optional, if the sequence is used in code).
98 |
99 | ### Drop Sequence
100 |
101 | Backward-incompatible migration
102 |
103 | * **stage3**: Update code to not use a sequence that will be deleted
104 | (optional, if the sequence is used in code).
105 | * **stage4**: `DROP SEQUENCE `.
106 |
107 | ### Alter Sequence
108 |
109 | Backward-compatible migration
110 |
111 | * **stage1**: `ALTER SEQUENCE ....`
112 |
113 | ---
114 |
115 | ## Table Operations
116 |
117 | ### Create Table
118 |
119 | Backward-compatible migration
120 |
121 | * **stage1**:
122 | * `CREATE TABLE ...`
123 | * Update code to use the new table.
124 |
125 | > **WARNING**: If there are foreign keys, table creation requires
126 | > `ShareRowExclusiveLock` on the child tables, so use `lock_timeout`
127 | > if the table to create contains foreign keys. `ADD FOREIGN KEY ... NOT VALID`
128 | > does require the same lock, so it doesn’t make much sense
129 | > to create foreign keys separately.
130 |
131 | ### Drop Table
132 |
133 | Backward-incompatible migration
134 |
135 | * **stage3**: Update code to not use a table that will be deleted.
136 | * **stage4**:
137 | * Drop all foreign key constraints to the table (see Drop foreign key).
138 | * `DROP TABLE `.
139 |
140 | ### Rename Table
141 |
142 | Backward-incompatible migration
143 |
144 | * **stage1**
145 | * Rename a table and create a view for backward compatibility
146 | (`AccessExclusiveLock`).
147 | * Update code to use the new table name.
148 |
149 | ```sql
150 | BEGIN;
151 | ALTER TABLE RENAME TO ;
152 | CREATE VIEW AS
153 | SELECT * FROM ;
154 | COMMIT;
155 | ```
156 |
157 | * **stage4**: `DROP VIEW `
158 |
159 | ## Column Operations
160 |
161 | ### ADD COLUMN ... NULL
162 |
163 | Backward-compatible migration
164 |
165 | * **stage1**
166 | * `ALTER TABLE ADD COLUMN ... NULL`
167 | * Update code to use the new column.
168 |
169 | ### ADD COLUMN ... NOT NULL
170 |
171 | Backward-incompatible migration requiring data backfilling
172 |
173 | * **stage1**
174 | * `ALTER TABLE ADD COLUMN ... DEFAULT `
175 | (safe from PostgreSQL 11)
176 | * Update code to use the new column (the code shouldn’t create null values
177 | for this column)
178 | * **stage2**: Backfill the new column with the default value.
179 | For existing table with data in it default value is mandatory.
180 | * **stage4**: Add NOT NULL constraint:
181 | * `ALTER TABLE ADD CONSTRAINT
182 | CHECK ( IS NOT NULL) NOT VALID`.
183 | * `ALTER TABLE VALIDATE CONSTRAINT `.
184 | * `ALTER TABLE ALTER COLUMN SET NOT NULL`
185 | *(from PostgreSQL 12, “if a valid CHECK constraint is found which proves
186 | no NULL can exist, then the table scan is skipped”).*
187 | * `ALTER TABLE ALTER COLUMN DROP DEFAULT`.
188 | * `ALTER TABLE DROP CONSTRAINT IF EXISTS `.
189 |
190 | ### ALTER COLUMN ... SET NOT NULL
191 |
192 | Backward-incompatible migration requiring data backfilling
193 |
194 | * **stage1:**
195 | * `ALTER TABLE ALTER COLUMN
196 | SET DEFAULT .`
197 | * Update code to not write null values for the column.
198 | * **stage2**: Backfill the column with the default value.
199 | * **stage4**: Add NOT NULL constraint:
200 | * `ALTER TABLE ADD CONSTRAINT
201 | CHECK ( IS NOT NULL) NOT VALID.`
202 | * `ALTER TABLE VALIDATE CONSTRAINT .`
203 | * `ALTER TABLE ALTER COLUMN SET NOT NULL`
204 | *(from PostgreSQL 12, “if a valid CHECK constraint is found
205 | which proves no NULL can exist, then the table scan is skipped”).*
206 | * `ALTER TABLE ALTER COLUMN DROP DEFAULT.`
207 | * `ALTER TABLE DROP CONSTRAINT IF EXISTS .`
208 |
209 | ### ALTER COLUMN ... DROP NOT NULL
210 |
211 | Backward-compatible migration
212 |
213 | * **stage1**: `ALTER TABLE ALTER COLUMN DROP NOT NULL.`
214 |
215 | ### ADD COLUMN ... NULL DEFAULT
216 |
217 | Backward-compatible migration (safe from PostgreSQL 11)
218 |
219 | * **stage1:**
220 | * `ALTER TABLE ADD COLUMN ... NULL DEFAULT .`
221 | * Update code to use the new column.
222 |
223 | ### ADD COLUMN ... NOT NULL DEFAULT
224 |
225 | Backward-compatible migration (safe from PostgreSQL 11)
226 |
227 | * **stage1:**
228 | * `ALTER TABLE ADD COLUMN ... NOT NULL DEFAULT `
229 | * Update code to use the new column.
230 |
231 | ### ALTER COLUMN ... SET DEFAULT
232 |
233 | Backward-compatible migration
234 |
235 | * **stage1**: `ALTER TABLE ALTER COLUMN
236 | SET DEFAULT .`
237 |
238 | ### ALTER COLUMN ... DROP DEFAULT
239 |
240 | Backward-incompatible migration (in the worst case if the column is NOT NULL)
241 |
242 | * **stage3**: Update the code to provide the default value
243 | (optional, if the column is NOT NULL).
244 | * **stage4**: `ALTER TABLE ALTER COLUMN DROP DEFAULT.`
245 |
246 | ### ADD COLUMN ... bigserial PRIMARY KEY
247 |
248 | This is a specific case for migrating table ids from the usual int to bigint.
249 |
250 | Backward-incompatible migration requiring data backfilling
251 |
252 | * **stage1:**
253 | * `CREATE SEQUENCE __seq AS bigint START `
254 | (start value must be greater than number of rows in the table).
255 | * `ALTER TABLE ADD COLUMN bigint DEFAULT 0,
256 | ALTER COLUMN SET DEFAULT nextval('__seq').`
257 | * `ALTER SEQUENCE __seq OWNED BY ..`
258 | * **stage2**: Backfill the new column with values 1 .. N.
259 | * **stage3**: Update the code to use the new column.
260 | * **stage4**: Add a primary key constraint:
261 | * `ALTER TABLE ADD CONSTRAINT
262 | CHECK ( IS NOT NULL) NOT VALID.`
263 | * `ALTER TABLE VALIDATE CONSTRAINT .`
264 | * `ALTER TABLE ALTER COLUMN SET NOT NULL`
265 | *(from PostgreSQL 12, “if a valid CHECK constraint is found
266 | which proves no NULL can exist, then the table scan is skipped”).*
267 | * `CREATE UNIQUE INDEX CONCURRENTLY ON ....`
268 | * `ALTER TABLE ADD CONSTRAINT _pkey PRIMARY KEY
269 | USING INDEX .`
270 | * `ALTER TABLE DROP CONSTRAINT IF EXISTS .`
271 |
272 | ### ADD COLUMN ... UUID PRIMARY KEY
273 |
274 | Backward-incompatible migration requiring data backfilling
275 |
276 | * **stage1:**
277 | * `ALTER TABLE ADD COLUMN UUID.`
278 | * `ALTER TABLE ALTER COLUMN
279 | SET DEFAULT gen_random_uuid()`
280 | *(for PostgreSQL < 13, use uuid_generate_v4 function
281 | from uuid-ossp extension).*
282 | * Update the code to use the new field.
283 | * **stage2**: Backfill the new column with unique UUID values.
284 | * **stage4**: Add primary key constraint:
285 | * `ALTER TABLE ADD CONSTRAINT
286 | CHECK ( IS NOT NULL) NOT VALID.`
287 | * `ALTER TABLE VALIDATE CONSTRAINT .`
288 | * `ALTER TABLE ALTER COLUMN SET NOT NULL`
289 | *(from PostgreSQL 12, “if a valid CHECK constraint is found
290 | which proves no NULL can exist, then the table scan is skipped”).*
291 | * `CREATE UNIQUE INDEX CONCURRENTLY ON ....`
292 | * `ALTER TABLE ADD CONSTRAINT _pkey
293 | PRIMARY KEY USING INDEX .`
294 | * `ALTER TABLE ALTER COLUMN DROP DEFAULT.`
295 | * `ALTER TABLE DROP CONSTRAINT IF EXISTS .`
296 |
297 | ### ADD COLUMN ... UNIQUE
298 |
299 | Backward-compatible migration
300 |
301 | * **stage1:**
302 | * `ALTER TABLE ADD COLUMN ....`
303 | * `CREATE UNIQUE INDEX CONCURRENTLY ON ....`
304 | * `ALTER TABLE ADD CONSTRAINT UNIQUE USING INDEX .`
305 |
306 | ### ADD COLUMN ... GENERATED AS IDENTITY
307 |
308 | This operation is going to acquire AccessExclusiveLock and rewrite the whole table
309 | on the spot. So the only safe way to do it is
310 | backward-incompatible migration requiring data backfilling.
311 |
312 | * **stage1:**
313 | * `CREATE SEQUENCE __seq AS bigint START `
314 | * Note that start value shouldn't be 1, you need a space to backfill ids.
315 | * `ALTER TABLE ADD COLUMN bigint DEFAULT 0,
316 | ALTER COLUMN SET DEFAULT nextval('__seq').`
317 | * `ALTER SEQUENCE __seq OWNED BY ..`
318 | * **stage2**: Backfill the new column with unique sequential values.
319 | * **stage3**:
320 | * `ALTER TABLE ADD CONSTRAINT
321 | CHECK ( IS NOT NULL) NOT VALID`
322 | * `ALTER TABLE VALIDATE CONSTRAINT `
323 | * `ALTER TABLE ALTER COLUMN SET NOT NULL`
324 | * `BEGIN`
325 | * `ALTER TABLE ALTER COLUMN DROP DEFAULT`
326 | * `ALTER TABLE ALTER COLUMN ADD GENERATED BY DEFAULT
327 | AS IDENTITY (START )`
328 | * `COMMIT`
329 | * `DROP SEQUENCE __seq`
330 |
331 | Be aware that IDENTITY doesn't guarantee that the column is unique;
332 | if you want this, take a look at the "Add UNIQUE Constraint" section.
333 |
334 | ---
335 |
336 | ### Change Column Type
337 |
338 | **Here we have two cases:**
339 |
340 | * Backward-compatible migration — directly use `ALTER TABLE
341 | ALTER COLUMN TYPE ...` and update code in the following cases
342 | (for PostgreSQL >= 9.2):
343 | * varchar(LESS) to varchar(MORE) where LESS < MORE
344 | * varchar(ANY) to text
345 | * numeric(LESS, SAME) to numeric(MORE, SAME)
346 | where LESS < MORE and SAME == SAME
347 |
348 | Backward-incompatible migration requiring data backfilling in all other cases
349 | (Tip: It's better to avoid such cases if possible):
350 |
351 | * **stage1:**
352 | * `ALTER TABLE ADD COLUMN new_ ...`
353 | (if column is NOT NULL, add this constraint in a separate migration;
354 | see ALTER COLUMN ... SET NOT NULL).
355 | * Dual write to both columns with a BEFORE INSERT/UPDATE trigger:
356 |
357 | ```sql
358 | CREATE OR REPLACE FUNCTION ()
359 | RETURNS trigger
360 | AS
361 | $$
362 | BEGIN
363 | NEW. := NEW.;
364 | RETURN NEW;
365 | END
366 | $$
367 | LANGUAGE 'plpgsql';
368 |
369 | CREATE TRIGGER
370 | BEFORE INSERT OR UPDATE
371 | ON
372 | FOR EACH ROW
373 | EXECUTE PROCEDURE ();
374 | ```
375 |
376 | * **stage2**: Backfill the new column with a copy of the old column’s values.
377 | * **stage4**:
378 | * Add foreign key constraints referencing the new column
379 | (see Add foreign key).
380 | * Drop foreign key constraints referencing the old column
381 | (see Drop foreign key).
382 | * Rename to old_ and new_ to
383 | within a single transaction and explicit LOCK statement.
384 | * `DROP TRIGGER ` in the same transaction.
385 | * `DROP FUNCTION ` in the same transaction.
386 | * `DROP INDEX CONCURRENTLY` for all indexes using the old column.
387 | * `DROP COLUMN old_`.
388 |
389 | ### Rename Column
390 |
391 | **Tip**: Avoid renaming columns when possible.
392 |
393 | Backward-incompatible migration requiring data backfilling
394 |
395 | * **stage1:**
396 | * Rename the table and create a view for backward compatibility
397 | (AccessExclusiveLock).
398 | * Update code to use the new column.
399 |
400 | ```sql
401 | BEGIN;
402 |
403 | ALTER TABLE
404 | RENAME COLUMN TO ;
405 |
406 | ALTER TABLE RENAME TO _tmp;
407 |
408 | CREATE VIEW AS
409 | SELECT *, AS
410 | FROM _tmp;
411 |
412 | COMMIT;
413 | ```
414 |
415 | * **stage4**: Drop the view and restore the original table name
416 | (AccessExclusiveLock).
417 |
418 | ```sql
419 | BEGIN;
420 |
421 | DROP VIEW ;
422 |
423 | ALTER TABLE _tmp RENAME TO ;
424 |
425 | COMMIT;
426 | ```
427 |
428 | **DEPRECATED Approach:**
429 |
430 | * **stage1:**
431 | * `ALTER TABLE ADD COLUMN new_ ...`
432 | (if column is NOT NULL, add this constraint in a separate migration;
433 | see ALTER COLUMN ... SET NOT NULL).
434 | * Dual write to both columns with a BEFORE INSERT/UPDATE trigger:
435 |
436 | ```sql
437 | CREATE OR REPLACE FUNCTION ()
438 | RETURNS trigger
439 | AS
440 | $$
441 | BEGIN
442 | NEW. := NEW.;
443 | RETURN NEW;
444 | END
445 | $$
446 | LANGUAGE 'plpgsql';
447 |
448 | CREATE TRIGGER
449 | BEFORE INSERT OR UPDATE
450 | ON
451 | FOR EACH ROW
452 | EXECUTE PROCEDURE ();
453 | ```
454 |
455 | * **stage2**: Backfill the new column with a copy of the old column’s values.
456 | * **stage3**: Update code to use the new column name.
457 | * **stage4**:
458 | * Add foreign key constraints referencing the new column
459 | (see Add foreign key).
460 | * Drop foreign key constraints referencing the old column
461 | (see Drop foreign key).
462 | * `DROP TRIGGER `.
463 | * `DROP FUNCTION `.
464 | * `DROP INDEX CONCURRENTLY` for all indexes using the old column.
465 | * `DROP COLUMN `.
466 |
467 | ### Drop Column
468 |
469 | Backward-incompatible migration
470 |
471 | * **stage3**: Update code to not use the column that will be dropped.
472 | * **stage4**:
473 | * Drop foreign key constraints referencing the column (see Drop foreign key).
474 | * `DROP INDEX CONCURRENTLY` for all indexes using the column.
475 | * `ALTER TABLE DROP COLUMN `.
476 |
477 | ## Constraints
478 |
479 | ### Add NOT NULL Constraint
480 |
481 | Backward-compatible migration
482 |
483 | * **stage1:**
484 | * `ALTER TABLE ADD CONSTRAINT
485 | CHECK ( IS NOT NULL) NOT VALID`.
486 | * `ALTER TABLE VALIDATE CONSTRAINT `
487 | (may fail if data is inconsistent).
488 | * `ALTER TABLE ALTER COLUMN SET NOT NULL`
489 | (from PostgreSQL 12, “if a valid CHECK constraint is found
490 | which proves no NULL can exist, then the table scan is skipped”).
491 | * `ALTER TABLE DROP CONSTRAINT IF EXISTS `.
492 |
493 | ### Remove NOT NULL Constraint
494 |
495 | Backward-compatible migration
496 |
497 | * **stage1:** `ALTER TABLE ALTER COLUMN DROP NOT NULL`.
498 |
499 | ### Add Foreign Key
500 |
501 | Backward-compatible migration
502 |
503 | * **stage1:**
504 | * `ALTER TABLE ADD FOREIGN KEY ... NOT VALID`.
505 | * `ALTER TABLE VALIDATE CONSTRAINT `
506 | (may fail if data is inconsistent).
507 |
508 | ### Drop Foreign Key
509 |
510 | Backward-compatible migration
511 |
512 | * **stage1:**
513 | * `ALTER TABLE DROP CONSTRAINT IF EXISTS `.
514 |
515 | ### Drop Primary Key Constraint
516 |
517 | Backward-compatible migration
518 |
519 | * **stage1:**
520 | * Update code to not rely on the column
521 | * `DROP INDEX CONCURRENTLY` for all indexes
522 | * Drop foreign keys in other tables
523 | * `ALTER TABLE DROP CONSTRAINT IF EXISTS `.
524 |
525 | ### Add Check Constraints
526 |
527 | Backward-compatible migration
528 |
529 | * **stage1:**
530 | * `ALTER TABLE ADD CONSTRAINT CHECK (...) NOT VALID`.
531 | * `ALTER TABLE VALIDATE CONSTRAINT `.
532 |
533 | ### Drop Check Constraint
534 |
535 | Backward-compatible migration
536 |
537 | * **stage1:**
538 | * `ALTER TABLE DROP CONSTRAINT IF EXISTS `.
539 |
540 | ### Add UNIQUE Constraint
541 |
542 | Backward-compatible migration
543 |
544 | * **stage1:**
545 | * `CREATE UNIQUE INDEX CONCURRENTLY ON ....`
546 | * `ALTER TABLE ADD CONSTRAINT UNIQUE USING INDEX `.
547 |
548 | ### Drop UNIQUE Constraint
549 |
550 | Backward-compatible migration
551 |
552 | * **stage1:**
553 | * `DROP INDEX CONCURRENTLY`
554 | * `SET lock_timeout = '1s';`
555 | * `ALTER TABLE DROP CONSTRAINT IF EXISTS .`
556 |
557 | ## Data Migrations
558 |
559 | All data migration operations are lock-safe, but should be done
560 | with batches and considering database load.
561 |
562 | ## Useful Links
563 |
564 | * [tbicr/django-pg-zero-downtime-migrations](https://github.com/tbicr/django-pg-zero-downtime-migrations)
565 | * [PostgreSQL at Scale: Database Schema Changes Without Downtime](https://example.com)
566 | * [Waiting for PostgreSQL 11 – Fast ALTER TABLE ADD COLUMN with a non-NULL default](https://example.com)
567 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | # Welcome to **migration-lint**
2 |
3 | `migration-lint` is a modular linter tool designed
4 | to perform checks on database schema migrations
5 | and prevent unsafe operations.
6 |
7 | Features:
8 |
9 | - Works with [Django migrations](https://docs.djangoproject.com/en/5.1/topics/migrations/),
10 | [Alembic](https://alembic.sqlalchemy.org/en/latest/) and raw sql files.
11 | - Easily extensible for other frameworks.
12 | - Can identify Backward Incompatible operations
13 | and check if they are allowed in the current context.
14 | - Can identify "unsafe" operations, e.g. operations that acquire locks
15 | that can be dangerous for production database.
16 |
17 | ## Installation
18 |
19 | ```shell linenums="0"
20 | poetry add "migration-lint"
21 | ```
22 |
23 | ```shell linenums="0"
24 | pip install "migration-lint"
25 | ```
26 |
27 | ## Terms
28 |
29 | - **Source loader** (or just loader) - class that loads list of changed files.
30 | - **Extractor** - class that extracts SQL by migration name,
31 | so it depends on the framework you use for migrations.
32 | - **Linter** - class that checks migration's SQL and context
33 | and returns errors if any. We have implemented our linter
34 | for backward incompatible migrations as well as integrated `squawk` linter.
35 |
36 | ## Run
37 |
38 | ### Local
39 |
40 | If you need to check local git changes (for example before commit):
41 |
42 | ```shell linenums="0"
43 | migration-lint --loader=local_git --extractor=
44 | ```
45 |
46 | It will examine files in current repository that are added or modified
47 | and not yet committed.
48 |
49 | ### GitLab
50 |
51 | If you need to run it on the GitLab pipeline:
52 |
53 | ```shell linenums="0"
54 | migration-lint --loader=gitlab_branch --extractor=
55 | ```
56 |
57 | It relies on default GitLab [environment variables](https://docs.gitlab.com/ee/ci/variables/predefined_variables.html),
58 | namely CI_PROJECT_ID, CI_COMMIT_BRANCH.
59 | You also should issue a token with read permissions
60 | and put it into env variable CI_DEPLOY_GITLAB_TOKEN.
61 |
62 | Also, these parameters can be passed via options:
63 |
64 | ```shell linenums="0"
65 | migration-lint --loader=gitlab_branch --extractor=
66 | --project-id= --branch= --gitlab-api-key=
67 | ```
68 |
69 | Also, version for Merge Requests is available:
70 |
71 | ```shell linenums="0"
72 | migration-lint --loader=gitlab_branch --extractor=
73 | ```
74 |
75 | It uses the env variable CI_MERGE_REQUEST_ID or the option --mr-id.
76 |
77 | ## Feedback
78 |
79 | We value feedback and are committed to supporting engineers throughout
80 | their journey.
81 |
82 | We have a dedicated email where we encourage engineers to share their feedback,
83 | ask questions, and seek assistance with any issues they may encounter.
84 | We appreciate your input and look forward to engaging with you
85 | to make your experience even better.
86 |
87 | [:material-email: Write us!](mailto:migration-lint-team@pandadoc.com)
88 | { .md-button .md-button--primary }
89 |
--------------------------------------------------------------------------------
/docs/rules.md:
--------------------------------------------------------------------------------
1 | # Rules API
2 |
3 | Linter supports own format for declarative rules definition:
4 |
5 | ```python
6 |
7 | from migration_lint.sql.model import ConditionalMatch, KeywordLocator, SegmentLocator
8 |
9 | rule = SegmentLocator(
10 | type="alter_table_statement",
11 | children=[
12 | KeywordLocator(raw="ADD"),
13 | KeywordLocator(raw="CONSTRAINT"),
14 | KeywordLocator(raw="NOT", inverted=True),
15 | KeywordLocator(raw="VALID", inverted=True),
16 | ],
17 | only_with=ConditionalMatch(
18 | locator=SegmentLocator(type="create_table_statement"),
19 | match_by=SegmentLocator(type="table_reference"),
20 | ),
21 | )
22 |
23 | ```
24 |
25 | - **SegmentLocator** - definition for any SQL part.
26 | It can match SQL code segments by a type
27 | (see [sqlfluff dialects and segment types](https://github.com/sqlfluff/sqlfluff/tree/main/src/sqlfluff/dialects)),
28 | raw content, children, etc.
29 | - **KeywordLocator** - a short version of SegmentLocator
30 | if you want to match the exact keyword.
31 | - **inverted=true** - flag for children inverted matching,
32 | for example in the example above it's
33 | "find ALTER TABLE ... ADD CONSTRAINT statement, but without NOT VALID".
34 | - **ConditionalMatch** - helps to check the migration context.
35 | For example, the ADD FOREIGN KEY statement can be highly dangerous
36 | if you run it on a big table, but if you just created this table,
37 | it's totally fine. **locator** parameter helps to find statements in the same migration,
38 | **match_by** helps to match the found statement with the one that is being checked.
39 | In the example above it's "find in the same migration CREATE TABLE statement
40 | and ensure that it's the same table".
41 |
42 | ## Rules order
43 |
44 | Rules are being checked from safest to the most dangerous:
45 |
46 | - Ignored
47 | - Data migration
48 | - Backward compatible
49 | - Backward incompatible
50 | - Restricted
51 |
52 | When you define a backward compatible rule, make sure that
53 | the rule is as specific as possible so that everything that is not
54 | explicitly allowed would be prohibited.
55 |
56 | ## Ignoring statements
57 |
58 | Add the following line in the migration SQL representation
59 | to ignore whole migration:
60 |
61 | ```sql
62 | -- migration-lint: ignore
63 | ```
64 |
65 | If you're using code-based migrations,
66 | make sure that the comment will appear in the SQL:
67 |
68 | ```python
69 | # example for Alembic
70 | op.execute("SELECT 1; -- migration-lint: ignore")
71 |
72 | # example for Django
73 | migrations.RunSQL("SELECT 1; -- migration-lint: ignore", migrations.RunSQL.noop),
74 | ```
--------------------------------------------------------------------------------
/docs/tags.md:
--------------------------------------------------------------------------------
1 | # Tags
2 |
3 | [TAGS]
4 |
--------------------------------------------------------------------------------
/migration_lint/__init__.py:
--------------------------------------------------------------------------------
import logging

# Package-wide logger; modules do `from migration_lint import logger`.
logger = logging.getLogger("default")
logger.setLevel(logging.DEBUG)

# Emit bare messages (no level/time prefix) to stderr.
_handler = logging.StreamHandler()
_handler.setFormatter(logging.Formatter("%(message)s"))
logger.addHandler(_handler)
9 |
--------------------------------------------------------------------------------
/migration_lint/analyzer/__init__.py:
--------------------------------------------------------------------------------
1 | from migration_lint.analyzer.base import Analyzer
2 | from migration_lint.analyzer.compat import CompatibilityLinter
3 | from migration_lint.analyzer.squawk import SquawkLinter
4 |
5 | __all__ = (
6 | "Analyzer",
7 | "CompatibilityLinter",
8 | "SquawkLinter",
9 | )
10 |
--------------------------------------------------------------------------------
/migration_lint/analyzer/base.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import abc
4 | from typing import Sequence, List
5 |
6 | from migration_lint import logger
7 | from migration_lint.extractor.base import BaseExtractor
8 | from migration_lint.extractor.model import ExtendedSourceDiff
9 | from migration_lint.source_loader.base import BaseSourceLoader
10 | from migration_lint.util.colors import green, red, yellow
11 |
12 |
13 | # A migration ignore mark.
14 | MANUALLY_IGNORE_ANNOTATION = "-- migration-lint: ignore"
15 |
16 | CLASSIFICATION_LINK = "https://pandadoc.github.io/migration-lint/classification/"
17 | IGNORE_LINK = "https://pandadoc.github.io/migration-lint/rules/#ignoring-statements"
18 |
19 |
class BaseLinter:
    """Base class for migration linters.

    Subclasses implement ``lint()`` and return a list of human-readable
    error messages for a single migration.
    """

    @abc.abstractmethod
    def lint(
        self,
        migration_sql: str,
        changed_files: List[ExtendedSourceDiff],
    ) -> List[str]:
        """Perform SQL migration linting.

        :param migration_sql: raw SQL of the migration to check.
        :param changed_files: all files changed alongside the migration.
        :return: list of error messages; empty when no issues are found.
        """

        raise NotImplementedError()
32 |
33 |
class Analyzer:
    """Migrations analyzer.

    Loads changed files via the source loader, extracts migration SQL via
    the extractor, and runs every configured linter on each migration.
    """

    def __init__(
        self,
        loader: BaseSourceLoader,
        extractor: BaseExtractor,
        linters: Sequence[BaseLinter],
    ) -> None:
        self.loader = loader
        self.extractor = extractor
        self.linters = linters

    def analyze(self) -> None:
        """Analyze migrations in files changed according to analyzer's source
        loader.

        Logs every linter finding and exits the process with status 1 when
        any errors were reported.
        """

        changed_files = self.loader.get_changed_files()
        metadata = self.extractor.create_metadata(changed_files)

        if not metadata.migrations:
            logger.info("Looks like you don't have any migration in MR.")
            return

        logger.info("")

        errors = []
        for migration in metadata.migrations:
            logger.info(green(f"Analyzing migration: {migration.path}\n"))

            # A migration carrying the manual ignore mark is skipped entirely.
            if MANUALLY_IGNORE_ANNOTATION in migration.raw_sql:
                logger.info(yellow("Migration is ignored."))
                continue

            for linter in self.linters:
                errors.extend(
                    linter.lint(migration.raw_sql, metadata.changed_files),
                )

            logger.info("")

        if errors:
            logger.info(red("Errors found in migrations:\n"))
            for error in errors:
                logger.error(error)
            logger.info("")
            logger.info(
                f"See classification of statements if you need to fix: {CLASSIFICATION_LINK}"
            )
            logger.info(
                f"See how to ignore the linter for one migration: {IGNORE_LINK}"
            )
            # raise SystemExit instead of calling exit(): the exit() builtin
            # is injected by the site module and is not guaranteed to exist
            # in all environments (e.g. with `python -S` or frozen apps).
            raise SystemExit(1)
        else:
            logger.info(green("Everything seems good!"))
90 |
--------------------------------------------------------------------------------
/migration_lint/analyzer/compat.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from io import StringIO
4 | from typing import List
5 |
6 | from migration_lint import logger
7 | from migration_lint.analyzer.base import BaseLinter
8 | from migration_lint.extractor.model import ExtendedSourceDiff
9 | from migration_lint.sql.constants import StatementType
10 | from migration_lint.sql.parser import classify_migration
11 | from migration_lint.util.colors import blue
12 |
13 |
14 | DOCS_URL = "https://pandadoc.github.io/migration-lint/classification/"
15 |
16 |
class CompatibilityLinter(BaseLinter):
    # NOTE: an f-string is not a string-literal docstring, so CPython would
    # not assign it to __doc__ at all (the original class silently had
    # __doc__ == None).  Assign __doc__ explicitly to keep the URL
    # single-sourced from DOCS_URL while making the docstring real.
    __doc__ = f"""
    Custom linter that checks backward compatibility
    based on migrations classification.

    See {DOCS_URL} for details.
    """

    def lint(
        self,
        migration_sql: str,
        changed_files: List[ExtendedSourceDiff],
        report_restricted: bool = False,
    ) -> List[str]:
        """Perform SQL migration linting.

        :param migration_sql: raw SQL of the migration.
        :param changed_files: all files changed alongside the migration.
        :param report_restricted: when True, report each restricted statement
            individually; otherwise emit a single summary error and defer
            details to squawk's output.
        :return: list of error messages; empty when the migration is clean.
        """

        errors = []

        classification_result = classify_migration(migration_sql)

        statement_types = set()
        logger.info(blue("Migration contains statements:\n"))
        for statement_sql, statement_type in classification_result:
            statement_types.add(statement_type)
            logger.info(f"- {statement_type.colorized}: {statement_sql}")

            if statement_type == StatementType.UNSUPPORTED:
                errors.append(f"- Statement can't be identified: {statement_sql}")

            if statement_type == StatementType.RESTRICTED and report_restricted:
                errors.append(
                    (
                        f"- Statement is restricted to use: {statement_sql}."
                        f"\n\tCheck the doc to do this correctly: {DOCS_URL}.\n"
                    )
                )

        # Without per-statement reporting, summarize restricted statements once.
        if StatementType.RESTRICTED in statement_types and not report_restricted:
            errors.append(
                (
                    "- There are restricted statements in migration"
                    "\n\tCheck squawk output below for details"
                    f"\n\tAlso check the doc to fix it: {DOCS_URL}\n"
                )
            )

        # Backward-incompatible statements are only allowed when every
        # changed file is whitelisted by the extractor.
        if StatementType.BACKWARD_INCOMPATIBLE in statement_types:
            not_allowed_files = [
                file.path
                for file in changed_files
                if not file.allowed_with_backward_incompatible
            ]
            if not_allowed_files:
                error = StringIO()
                error.write(
                    (
                        "- You have backward incompatible operations, "
                        "which is not allowed with changes in following files:"
                    )
                )
                for file_name in not_allowed_files:
                    error.write(f"\n\t- {file_name}")
                error.write(
                    "\n\n\tPlease, separate changes in different merge requests.\n"
                )

                errors.append(error.getvalue())

        # Mixing data migrations with schema changes in one migration is
        # rejected; report the first data-migration statement found.
        if StatementType.DATA_MIGRATION in statement_types and (
            StatementType.BACKWARD_COMPATIBLE in statement_types
            or StatementType.BACKWARD_INCOMPATIBLE in statement_types
        ):
            statement_sql = [
                r[0]
                for r in classification_result
                if r[1] == StatementType.DATA_MIGRATION
            ][0]
            errors.append(
                (
                    f"- Seems like you have data migration along with schema migration: {statement_sql}"
                    "\n\n\tPlease, separate changes in different merge requests.\n"
                )
            )

        return errors
102 |
--------------------------------------------------------------------------------
/migration_lint/analyzer/squawk.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import os.path
4 | import re
5 | import subprocess
6 | import sys
7 | from io import StringIO
8 | from collections import defaultdict
9 | from typing import List, Dict, Optional
10 |
11 | import migration_lint
12 | from migration_lint.analyzer.base import BaseLinter
13 | from migration_lint.extractor.model import ExtendedSourceDiff
14 |
15 |
class SquawkLinter(BaseLinter):
    """Squawk linter integration.

    Runs the bundled ``squawk`` binary over the migration SQL (fed via
    stdin) and reformats its report into this project's error format.
    """

    # Squawk rules excluded from checks, with the reason for each.
    ignored_rules = [
        "ban-drop-column",  # Backward-incompatible, checked by compatibility analyzer.
        "ban-drop-table",  # Backward-incompatible, checked by compatibility analyzer.
        "adding-not-nullable-field",  # Backward-incompatible, checked by compatibility analyzer.
        "prefer-big-int",  # Deprecated.
        "prefer-identity",
        "prefer-timestamptz",
        "ban-drop-not-null",  # Dropping a NOT NULL constraint is safe but may break existing clients.
        "prefer-robust-stmts",  # TODO: Add transactions tracking.
        "transaction-nesting",  # TODO: Add transactions tracking.
    ]

    def __init__(
        self, config_path: Optional[str] = None, pg_version: Optional[str] = None
    ) -> None:
        """Locate the platform-specific squawk binary shipped in the package.

        :param config_path: optional path to a squawk config file.
        :param pg_version: optional target PostgreSQL version for squawk.
        :raises RuntimeError: on platforms with no bundled binary.
        """
        bin_dir = os.path.join(migration_lint.__path__[0], "bin")
        self.config_path = config_path
        self.pg_version = pg_version
        if sys.platform == "linux":
            self.squawk = os.path.join(bin_dir, "squawk-linux-x86")
        elif sys.platform == "darwin":
            self.squawk = os.path.join(bin_dir, "squawk-darwin-arm64")
        else:
            raise RuntimeError(f"unsupported platform: {sys.platform}")

    def squawk_command(self, migration_sql: str) -> str:
        """Get squawk command.

        NOTE(review): ``migration_sql`` is unused here — the SQL is passed
        to the process via stdin in ``lint()``.
        """
        exclude = f"--exclude={','.join(self.ignored_rules)}"
        config = f"--config={self.config_path}" if self.config_path else ""
        pg_version = f"--pg-version={self.pg_version}" if self.pg_version else ""

        return " ".join([self.squawk, exclude, config, pg_version]).strip()

    def lint(
        self,
        migration_sql: str,
        changed_files: List[ExtendedSourceDiff],
    ) -> List[str]:
        """Perform SQL migration linting.

        :param migration_sql: raw SQL of the migration, piped to squawk.
        :param changed_files: unused by this linter; kept for the
            ``BaseLinter`` interface.
        :return: reformatted squawk findings, one entry per offending
            statement, plus a trailing link to the squawk rule docs.
        """

        output = subprocess.run(
            self.squawk_command(migration_sql),
            input=migration_sql.encode(),
            shell=True,
            stdout=subprocess.PIPE,
        ).stdout.decode("utf-8")

        # Reformat the output for brevity.
        #
        # Squawk's report interleaves "stdin:<line>:<col>" headers, numbered
        # SQL excerpt lines ("  N | <sql>") and indented explanation text.
        # Walk it line by line, accumulating the current statement excerpt
        # and its message, keyed by the "stdin:..." location prefix.

        statements: Dict[str, str] = {}
        error_msgs: Dict[str, List[str]] = defaultdict(list)

        key = None  # current "stdin:<line>:<col>" location
        statement = None  # buffer for the current SQL excerpt
        error_msg = None  # buffer for the current rule message
        for line in output.splitlines():
            # Matches a numbered SQL excerpt line: "  N | <sql>".
            stmt_m = re.match(r"\s+\d+\s+\|\s(.*)", line)

            if line.startswith("stdin:"):
                # New finding begins: flush the previous one first.
                if key is not None:
                    statements[key] = statement.getvalue()
                    error_msgs[key].append(error_msg.getvalue())

                key = line[: line.index(" ")]

                statement = None
                error_msg = StringIO()
                error_msg.write(f" - squawk {line[line.index(' ') + 1:]}\n\n")

            elif stmt_m is not None:
                # First excerpt line gets the "-" bullet, the rest align under it.
                if statement is None:
                    statement = StringIO()
                    statement.write(f"- {stmt_m.group(1)}\n")
                else:
                    statement.write(f" {stmt_m.group(1)}\n")

            elif line.startswith(" "):
                # Indented explanation text belongs to the current message.
                if error_msg is not None:
                    error_msg.write(f" {line}\n")
        else:
            # End of output: flush the last pending finding, if any.
            if key is not None:
                assert statement is not None
                assert error_msg is not None

                statements[key] = statement.getvalue()
                error_msgs[key].append(error_msg.getvalue())

        # Generate the errors.

        errors: List[str] = []

        error = StringIO()
        for key in statements:
            error.write(f"{statements[key]}\n")
            for msg in error_msgs[key]:
                error.write(msg)
            errors.append(error.getvalue())

            # Fresh buffer for the next statement's error text.
            error = StringIO()

        if errors:
            errors.append(
                "squawk: find detailed examples and solutions for each rule at "
                "https://squawkhq.com/docs/rules"
            )

        return errors
126 |
--------------------------------------------------------------------------------
/migration_lint/django/__init__.py:
--------------------------------------------------------------------------------
"""
Migration linter integration for Django.

This app provides the ``migration_lint`` Django management command,
which efficiently handles MRs with many migrations.
"""

default_app_config = "migration_lint.django.apps.MigrationLintDjangoConfig"
9 |
--------------------------------------------------------------------------------
/migration_lint/django/apps.py:
--------------------------------------------------------------------------------
1 | from django.apps import AppConfig
2 |
3 |
class MigrationLintDjangoConfig(AppConfig):
    """Django AppConfig registering the migration-lint management command app."""

    name = "migration_lint.django"
    label = "migration_lint_django"
7 |
--------------------------------------------------------------------------------
/migration_lint/django/extractor/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PandaDoc/migration-lint/595a5026b3857b9213af2c0259e019525d6a38e0/migration_lint/django/extractor/__init__.py
--------------------------------------------------------------------------------
/migration_lint/django/extractor/django_management.py:
--------------------------------------------------------------------------------
1 | import os.path
2 |
3 | from migration_lint import logger
4 | from migration_lint.extractor.django import DjangoExtractor
5 |
6 | import django.apps
7 | from django.core.management import call_command, CommandError
8 |
9 |
class DjangoManagementExtractor(DjangoExtractor):
    """Migrations extractor for Django migrations for management command."""

    NAME = "django_management"

    def extract_sql(self, migration_path: str) -> str:
        """Extract raw SQL from the migration file.

        Resolves the Django app label from the migration path and shells out
        to Django's ``sqlmigrate`` management command.

        :param migration_path: repository-relative path of the migration file.
        :return: the migration SQL, or "" when ``sqlmigrate`` fails.
        """

        parts = migration_path.split("/")
        file_name = parts[-1]
        # Fallback guess assuming an "<app>/migrations/<file>.py" layout.
        app = parts[-3]
        # Strip only the trailing ".py"; str.replace() would also mangle a
        # ".py" occurring in the middle of the file name.
        migration_name = file_name.removesuffix(".py")

        # handle subapp app name: prefer the registered app config whose
        # migrations directory actually contains this path.
        for app_config in django.apps.apps.get_app_configs():
            app_path = os.path.relpath(app_config.path, ".")
            app_migrations_path = os.path.join(app_path, "migrations")
            if migration_path.startswith(app_migrations_path):
                app = app_config.label
                break

        logger.info(
            f"Extracting sql for migration: app={app}, migration_name={migration_name}"
        )

        try:
            return call_command("sqlmigrate", app, migration_name)
        except CommandError:
            logger.error(
                f"Failed to extract SQL for migration app={app}, migration_name={migration_name}"
            )
            return ""
42 |
--------------------------------------------------------------------------------
/migration_lint/django/management/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PandaDoc/migration-lint/595a5026b3857b9213af2c0259e019525d6a38e0/migration_lint/django/management/__init__.py
--------------------------------------------------------------------------------
/migration_lint/django/management/commands/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PandaDoc/migration-lint/595a5026b3857b9213af2c0259e019525d6a38e0/migration_lint/django/management/commands/__init__.py
--------------------------------------------------------------------------------
/migration_lint/django/management/commands/migration_lint.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from migration_lint import logger
4 | from migration_lint.analyzer import Analyzer, CompatibilityLinter, SquawkLinter
5 | from migration_lint.extractor import Extractor
6 | from migration_lint.source_loader import SourceLoader, LocalLoader
7 | from migration_lint.django.extractor.django_management import DjangoManagementExtractor # noqa
8 |
9 | from django.core.management.base import BaseCommand
10 |
11 | from migration_lint.util.env import get_bool_env
12 |
13 |
class Command(BaseCommand):
    """Django management command running migration-lint over changed files."""

    def add_arguments(self, parser):
        """Register CLI options; defaults come from CI environment variables."""
        parser.add_argument(
            "--loader",
            dest="loader_type",
            type=str,
            choices=SourceLoader.names(),
            default=os.getenv("LOADER_TYPE", LocalLoader.NAME),
            help="loader type (where to take source files changes)",
        )
        parser.add_argument(
            "--only-new-files",
            dest="only_new_files",
            action="store_true",
            default=get_bool_env("ONLY_NEW_FILES", True),
            help="lint only new files, ignore changes in existing files",
        )
        parser.add_argument(
            "--gitlab-instance",
            dest="gitlab_instance",
            type=str,
            default=os.getenv("CI_SERVER_URL"),
            # Fixed duplicated word in the user-facing help text.
            help="GitLab instance (protocol://host:port)",
        )
        parser.add_argument(
            "--project-id",
            dest="project_id",
            type=str,
            default=os.getenv("CI_PROJECT_ID"),
            help="GitLab project id (repo)",
        )
        parser.add_argument(
            "--gitlab-api-key",
            dest="gitlab_api_key",
            type=str,
            default=os.getenv("CI_DEPLOY_GITLAB_TOKEN"),
            help="api key for GitLab API",
        )
        parser.add_argument(
            "--branch",
            dest="branch",
            type=str,
            default=os.getenv(
                "CI_MERGE_REQUEST_SOURCE_BRANCH_NAME", os.getenv("CI_COMMIT_BRANCH")
            ),
            help="branch to compare",
        )
        parser.add_argument(
            "--mr-id",
            dest="mr_id",
            type=str,
            default=os.getenv("CI_MERGE_REQUEST_ID"),
            help="integer merge request id",
        )
        parser.add_argument(
            "--squawk-config-path",
            dest="squawk_config_path",
            type=str,
            default=os.getenv("MIGRATION_LINTER_SQUAWK_CONFIG_PATH"),
            help="squawk config path",
        )
        parser.add_argument(
            "--squawk-pg-version",
            dest="squawk_pg_version",
            type=str,
            default=os.getenv("MIGRATION_LINTER_SQUAWK_PG_VERSION"),
            help="squawk version of PostgreSQL",
        )
        parser.add_argument(
            "--ignore-extractor-fail",
            dest="ignore_extractor_fail",
            action="store_true",
            default=get_bool_env("MIGRATION_LINTER_IGNORE_EXTRACTOR_FAIL", False),
            help="Don't fail the whole linter if extraction of sql fails",
        )
        parser.add_argument(
            "--ignore-extractor-not-found",
            dest="ignore_extractor_not_found",
            action="store_true",
            default=get_bool_env("MIGRATION_LINTER_IGNORE_EXTRACTOR_NOT_FOUND", False),
            help="""
            Don't fail the whole linter if extraction went fine,
            but info about particular migration couldn't be found
            """,
        )

    def handle(self, loader_type, squawk_config_path, squawk_pg_version, **options):
        """Wire the loader, the django_management extractor and both linters,
        then run the analysis (exits non-zero on lint errors).
        """
        logger.info("Start analysis..")

        loader = SourceLoader.get(loader_type)(**options)
        extractor = Extractor.get("django_management")(**options)
        analyzer = Analyzer(
            loader=loader,
            extractor=extractor,
            linters=[
                CompatibilityLinter(),
                SquawkLinter(
                    config_path=squawk_config_path,
                    pg_version=squawk_pg_version,
                ),
            ],
        )
        analyzer.analyze()
117 |
--------------------------------------------------------------------------------
/migration_lint/extractor/__init__.py:
--------------------------------------------------------------------------------
1 | from migration_lint.extractor.base import Extractor
2 | from migration_lint.extractor.alembic import AlembicExtractor
3 | from migration_lint.extractor.django import DjangoExtractor
4 | from migration_lint.extractor.flyway import FlywayExtractor
5 |
6 | __all__ = (
7 | "Extractor",
8 | "AlembicExtractor",
9 | "DjangoExtractor",
10 | "FlywayExtractor",
11 | )
12 |
--------------------------------------------------------------------------------
/migration_lint/extractor/alembic.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import subprocess
4 | from io import StringIO
5 | from functools import lru_cache
6 |
7 | from migration_lint import logger
8 | from migration_lint.extractor.base import BaseExtractor
9 |
10 |
class AlembicExtractor(BaseExtractor):
    """Migrations extractor for Alembic migrations.

    Runs Alembic's offline ("sqlmigrate") mode once per instance and splits
    the output into per-revision SQL chunks.
    """

    NAME = "alembic"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.command = kwargs.get("alembic_command") or "make sqlmigrate"
        self.migration_path = os.environ.get(
            "MIGRATION_LINT_ALEMBIC_MIGRATIONS_PATH", "/migrations/versions/"
        )
        # Per-instance cache of the offline-mode output.  The original used
        # @functools.lru_cache on the method, which keys the cache on `self`
        # and keeps the instance alive for the cache's lifetime (ruff B019).
        self._migrations_sql_cache = None

    def is_migration(self, path: str) -> bool:
        """Check if the specified file is a migration."""

        return (
            self.migration_path in path
            and path.endswith(".py")
            and "__init__.py" not in path
        )

    def is_allowed_with_backward_incompatible_migration(self, path: str) -> bool:
        """Check if the specified file changes are allowed with
        backward-incompatible migrations.
        """

        allowed_patterns = [
            r".*/tables\.py",
            r".*/constants\.py",
            r".*/enums\.py",
            rf".*{self.migration_path}.*\.py",
            r"^(?!.*\.py).*$",  # any non-Python file
        ]
        return any(re.match(pattern, path) for pattern in allowed_patterns)

    def extract_sql(self, migration_path: str) -> str:
        """Extract raw SQL from the migration file.

        :raises RuntimeError: when the revision is absent from the offline
            output and ``ignore_extractor_not_found`` is not set.
        """

        file_name = migration_path.split("/")[-1]
        # Assumes the revision id is the second underscore-separated token
        # of the file name — TODO confirm against the project's Alembic
        # file naming template.
        parts = file_name.split("_")
        version = parts[1]

        logger.info(f"Extracting sql for migration: version={version}")
        logger.info(self.command)

        migrations_sql = self._get_migrations_sql()

        try:
            return migrations_sql[version]
        except KeyError:
            error_msg = (
                f"Couldn't find info about migration with version={version} "
                f"in alembic offline mode output"
            )
            if self.ignore_extractor_not_found:
                logger.error(error_msg)
                return ""
            raise RuntimeError(error_msg)

    def _get_migrations_sql(self):
        """Get raw SQL for all migrations (computed once per instance)."""

        if self._migrations_sql_cache is not None:
            return self._migrations_sql_cache

        try:
            lines = (
                subprocess.check_output(self.command.split(" "))
                .decode("utf-8")
                .split("\n")
            )
        except subprocess.CalledProcessError:
            logger.error("Failed to extract SQL for migrations")
            if self.ignore_extractor_fail:
                return {}
            raise

        migrations_sql = {}

        current_migration = None
        current_migration_sql = StringIO()

        for line in lines:
            # Each revision's SQL starts after an upgrade marker comment.
            m = re.match(r"-- Running upgrade \w* -> (\w*)", line)
            if m is not None:
                # Flush the SQL accumulated for the previous revision.
                if current_migration is not None:
                    migrations_sql[current_migration] = (
                        current_migration_sql.getvalue().strip("\n")
                    )

                current_migration = m.group(1)
                current_migration_sql = StringIO()

            elif "INSERT INTO alembic_version" in line:
                continue  # bookkeeping statement, not part of the migration
            elif "UPDATE alembic_version" in line:
                continue  # bookkeeping statement, not part of the migration
            elif line.startswith("/"):
                continue  # alembic banner/comment lines
            else:
                current_migration_sql.write(f"{line}\n")

        else:
            # Flush the final revision's SQL.
            if current_migration is not None:
                migrations_sql[current_migration] = (
                    current_migration_sql.getvalue().strip("\n")
                )

        self._migrations_sql_cache = migrations_sql
        return migrations_sql
122 |
--------------------------------------------------------------------------------
/migration_lint/extractor/base.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import abc
4 | from typing import Any, Sequence, cast, Dict
5 |
6 | from migration_lint.source_loader.model import SourceDiff
7 | from migration_lint.extractor.model import (
8 | ExtendedSourceDiff,
9 | Migration,
10 | MigrationsMetadata,
11 | )
12 |
13 |
class Extractor(type):
    """Metaclass for migrations extractors.

    This metaclass registers all its instances (concrete extractor classes)
    in the ``extractors`` registry, keyed by their ``NAME`` attribute.
    """

    # Registry of extractor classes by NAME.
    extractors: Dict[str, Extractor] = {}

    def __new__(
        mcls,
        name: str,
        bases: tuple[Extractor, ...],
        classdict: dict[str, Any],
    ) -> Extractor:
        """Create the class and register it unless it is the base class.

        :raises NotImplementedError: when a concrete extractor class does
            not declare a ``NAME`` attribute.
        """
        cls = cast(Extractor, type.__new__(mcls, name, bases, classdict))

        if len(bases) > 0:
            # Not the base class: concrete extractors must declare NAME.
            if not hasattr(cls, "NAME"):
                # Fixed typo in the error message ("provie" -> "provide").
                raise NotImplementedError(
                    f"extractor {cls.__name__} doesn't provide name",
                )

            mcls.extractors[cls.NAME] = cls

        return cls

    @classmethod
    def names(mcls) -> Sequence[str]:
        """Get the names of all registered extractors."""

        return list(mcls.extractors.keys())

    @classmethod
    def get(mcls, name: str) -> Extractor:
        """Get a registered extractor by its name."""

        return mcls.extractors[name]
52 |
53 |
class BaseExtractor(metaclass=Extractor):
    """Base class for migrations extractor.

    Concrete extractors must implement ``is_migration``,
    ``is_allowed_with_backward_incompatible_migration`` and
    ``extract_sql``.
    """

    def __init__(self, **kwargs) -> None:
        # Optional behaviour toggles, passed through from the CLI layer.
        self.ignore_extractor_fail = kwargs.get("ignore_extractor_fail")
        self.ignore_extractor_not_found = kwargs.get("ignore_extractor_not_found")

    def create_metadata(
        self,
        changed_files: Sequence[SourceDiff],
    ) -> MigrationsMetadata:
        """Create migrations metadata by the list of changed files."""

        metadata = MigrationsMetadata()
        for diff in changed_files:
            file_path = diff.path
            allowed = self.is_allowed_with_backward_incompatible_migration(file_path)

            metadata.changed_files.append(
                ExtendedSourceDiff.of_source_diff(diff, allowed),
            )

            if not self.is_migration(file_path):
                continue

            metadata.migrations.append(
                Migration(path=file_path, raw_sql=self.extract_sql(file_path)),
            )

        return metadata

    @abc.abstractmethod
    def is_migration(self, path: str) -> bool:
        """Check if the specified file is a migration."""

        raise NotImplementedError()

    @abc.abstractmethod
    def is_allowed_with_backward_incompatible_migration(self, path: str) -> bool:
        """Check if the specified file changes are allowed with
        backward-incompatible migrations.
        """

        raise NotImplementedError()

    @abc.abstractmethod
    def extract_sql(self, migration_path: str) -> str:
        """Extract raw SQL from the migration file."""

        raise NotImplementedError()
104 |
--------------------------------------------------------------------------------
/migration_lint/extractor/django.py:
--------------------------------------------------------------------------------
1 | import re
2 | import subprocess
3 |
4 | from migration_lint import logger
5 | from migration_lint.extractor.base import BaseExtractor
6 |
7 |
class DjangoExtractor(BaseExtractor):
    """Migrations extractor for Django migrations.

    Renders a migration module into the SQL it would execute by running
    the project's ``sqlmigrate`` make target.
    """

    NAME = "django"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Command template used to render a migration into SQL; the first
        # `skip_lines` lines of its output are tool noise, not SQL.
        self.command = "make sqlmigrate app={app} migration={migration_name}"
        self.skip_lines = 2

    def is_migration(self, path: str) -> bool:
        """Check if the specified file is a migration."""

        return (
            "/migrations/" in path
            and path.endswith(".py")
            and "__init__.py" not in path
        )

    def is_allowed_with_backward_incompatible_migration(self, path: str) -> bool:
        """Check if the specified file changes are allowed with
        backward-incompatible migrations.
        """

        allowed_patterns = [
            r".*/models\.py",
            r".*/constants\.py",
            r".*/enums\.py",
            r".*/migrations/.*\.py",
            r"^(?!.*\.py).*$",  # any non-Python file
        ]
        return any(re.match(pattern, path) for pattern in allowed_patterns)

    def extract_sql(self, migration_path: str) -> str:
        """Extract raw SQL from the migration file.

        Returns an empty string on command failure when
        ``ignore_extractor_fail`` is set; re-raises otherwise.
        """

        # Assumes the conventional layout <app>/migrations/<name>.py, so the
        # app name is the third path component from the end.
        parts = migration_path.split("/")
        file_name = parts[-1]
        app = parts[-3]
        # removesuffix (not str.replace) only strips the trailing extension,
        # so a ".py" occurring mid-name can't be mangled.
        migration_name = file_name.removesuffix(".py")

        logger.info(
            f"Extracting sql for migration: app={app}, migration_name={migration_name}"
        )

        try:
            output = subprocess.check_output(
                self.command.format(app=app, migration_name=migration_name).split(" ")
            ).decode("utf-8")
        except subprocess.CalledProcessError:
            logger.error(
                f"Failed to extract SQL for migration app={app}, migration_name={migration_name}"
            )
            if self.ignore_extractor_fail:
                return ""
            raise
        return "\n".join(output.split("\n")[self.skip_lines :])
70 |
--------------------------------------------------------------------------------
/migration_lint/extractor/flyway.py:
--------------------------------------------------------------------------------
1 | from migration_lint.extractor.raw_sql import RawSqlExtractor
2 |
3 |
class FlywayExtractor(RawSqlExtractor):
    """Migrations extractor for Flyway migrations.

    Flyway keeps its versioned SQL migrations under a ``db/migration``
    directory; only files there count as migrations.
    """

    NAME = "flyway"

    def is_migration(self, path: str) -> bool:
        """Check if the specified file is a migration."""

        return path.endswith(".sql") and "/db/migration/" in path

    def is_allowed_with_backward_incompatible_migration(self, path: str) -> bool:
        """Check if the specified file changes are allowed with
        backward-incompatible migrations.
        """

        # Only migration files themselves are allowed alongside a
        # backward-incompatible migration.
        return self.is_migration(path)
20 |
--------------------------------------------------------------------------------
/migration_lint/extractor/model.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from dataclasses import dataclass, field
4 | from typing import List
5 |
6 | from migration_lint.source_loader.model import SourceDiff
7 |
8 |
@dataclass
class Migration:
    """A database migration file representation.

    - path -- path to the migration file;
    - raw_sql -- raw SQL representation of the migration.
    """

    # Path to the migration file within the repository.
    path: str
    # Raw SQL the migration executes, as produced by an extractor.
    raw_sql: str
19 |
20 |
@dataclass
class ExtendedSourceDiff(SourceDiff):
    """An object describing a single changed file.

    - `path` -- the path to the file;
    - `old_path` -- the previous path to the file; differs from `path` if the
      file was renamed;
    - `diff` -- difference between versions (if present).
    - `allowed_with_backward_incompatible` -- is this file is allowed with
      backward-incompatible migrations.
    """

    # Whether this file may change alongside a backward-incompatible migration.
    allowed_with_backward_incompatible: bool = False

    @classmethod
    def of_source_diff(
        cls,
        source_diff: SourceDiff,
        allowed_with_backward_incompatible: bool,
    ) -> ExtendedSourceDiff:
        """Create an instance by the given source diff."""

        return cls(
            path=source_diff.path,
            old_path=source_diff.old_path,
            diff=source_diff.diff,
            allowed_with_backward_incompatible=allowed_with_backward_incompatible,
        )
49 |
50 |
@dataclass
class MigrationsMetadata:
    """Migrations metadata.

    - `changed_files` -- a list of changed files;
    - `migrations` -- a list of migrations.
    """

    # All files changed in the diff, annotated with compatibility info.
    changed_files: List[ExtendedSourceDiff] = field(default_factory=list)
    # The subset of changed files recognized as migrations, with their SQL.
    migrations: List[Migration] = field(default_factory=list)
61 |
--------------------------------------------------------------------------------
/migration_lint/extractor/raw_sql.py:
--------------------------------------------------------------------------------
1 | from migration_lint.extractor.base import BaseExtractor
2 |
3 |
class RawSqlExtractor(BaseExtractor):
    """Migrations extractor for SQL files migrations."""

    NAME = "raw_sql"

    def is_migration(self, path: str) -> bool:
        """Check if the specified file is a migration."""

        return path.endswith(".sql")

    def is_allowed_with_backward_incompatible_migration(self, path: str) -> bool:
        """Check if the specified file changes are allowed with
        backward-incompatible migrations.
        """

        return self.is_migration(path)

    def extract_sql(self, migration_path: str) -> str:
        """Extract raw SQL from the migration file.

        Returns an empty string when reading fails and
        ``ignore_extractor_fail`` is set; re-raises otherwise.
        """

        try:
            with open(migration_path, "r") as f:
                return f.read()
        # Was a bare `except:`, which also swallows KeyboardInterrupt and
        # SystemExit; catch Exception so those still propagate.
        except Exception:
            if self.ignore_extractor_fail:
                return ""
            raise
31 |
--------------------------------------------------------------------------------
/migration_lint/main.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import click
4 |
5 | from migration_lint import logger
6 | from migration_lint.analyzer import Analyzer, CompatibilityLinter, SquawkLinter
7 | from migration_lint.extractor import Extractor, DjangoExtractor
8 | from migration_lint.source_loader import SourceLoader, LocalLoader
9 | from migration_lint.util.env import get_bool_env
10 |
11 |
@click.command()
# Base setup
@click.option(
    "--loader",
    "loader_type",
    help="loader type (where to take source files changes)",
    type=click.Choice(SourceLoader.names(), case_sensitive=False),
    default=os.getenv("LOADER_TYPE", LocalLoader.NAME),
)
@click.option(
    "--extractor",
    "extractor_type",
    help="extractor type (how to extract SQL from migrations)",
    type=click.Choice(Extractor.names(), case_sensitive=False),
    default=os.getenv("EXTRACTOR", DjangoExtractor.NAME),
)
@click.option(
    "--only-new-files",
    help="lint only new files, ignore changes in existing files",
    default=get_bool_env("ONLY_NEW_FILES", True),
)
# gitlab-specific arguments
@click.option(
    "--project-id",
    help="GitLab project id (repo)",
    default=os.getenv("CI_PROJECT_ID"),
)
@click.option(
    "--gitlab-instance",
    help="GitLab instance instance (protocol://host:port)",
    default=os.getenv("CI_SERVER_URL"),
)
@click.option(
    "--gitlab-api-key",
    help="api key for GitLab API",
    default=os.getenv("CI_DEPLOY_GITLAB_TOKEN"),
)
@click.option(
    "--branch",
    help="branch to compare",
    default=os.getenv(
        "CI_MERGE_REQUEST_SOURCE_BRANCH_NAME", os.getenv("CI_COMMIT_BRANCH")
    ),
)
@click.option(
    "--mr-id",
    help="integer merge request id",
    default=os.getenv("CI_MERGE_REQUEST_ID"),
)
@click.option(
    "--squawk-config-path",
    "squawk_config_path",
    help="squawk config path",
    default=os.getenv("MIGRATION_LINTER_SQUAWK_CONFIG_PATH"),
)
@click.option(
    "--squawk-pg-version",
    "squawk_pg_version",
    help="squawk version of PostgreSQL",
    default=os.getenv("MIGRATION_LINTER_SQUAWK_PG_VERSION"),
)
@click.option(
    "--alembic-command",
    "alembic_command",
    help="command to get Alembic migrations sql",
    default=os.getenv("MIGRATION_LINTER_ALEMBIC_COMMAND"),
)
@click.option(
    "--ignore-extractor-fail",
    "ignore_extractor_fail",
    is_flag=True,
    help="Don't fail the whole linter if extraction of sql fails",
    # get_bool_env, not os.getenv: any non-empty env value (even "false")
    # is a truthy string and would silently default the flag to True.
    default=get_bool_env("MIGRATION_LINTER_IGNORE_EXTRACTOR_FAIL", False),
)
@click.option(
    "--ignore-extractor-not-found",
    "ignore_extractor_not_found",
    is_flag=True,
    help="Don't fail the whole linter if extraction went fine, but info about particular migration couldn't be found",
    default=get_bool_env("MIGRATION_LINTER_IGNORE_EXTRACTOR_NOT_FOUND", False),
)
def main(
    loader_type: str,
    extractor_type: str,
    squawk_config_path: str,
    squawk_pg_version: str,
    **kwargs,
) -> None:
    """CLI entry point: load changed files, extract migration SQL, lint it.

    The remaining options are passed through to the selected loader and
    extractor constructors via ``**kwargs``.
    """

    logger.info("Start analysis..")

    loader = SourceLoader.get(loader_type)(**kwargs)
    extractor = Extractor.get(extractor_type)(**kwargs)
    analyzer = Analyzer(
        loader=loader,
        extractor=extractor,
        linters=[
            CompatibilityLinter(),
            SquawkLinter(
                config_path=squawk_config_path,
                pg_version=squawk_pg_version,
            ),
        ],
    )
    analyzer.analyze()


if __name__ == "__main__":
    main()
120 |
--------------------------------------------------------------------------------
/migration_lint/source_loader/__init__.py:
--------------------------------------------------------------------------------
1 | from migration_lint.source_loader.base import SourceLoader
2 | from migration_lint.source_loader.local import LocalLoader
3 | from migration_lint.source_loader.gitlab import GitlabBranchLoader, GitlabMRLoader
4 |
5 | __all__ = (
6 | "SourceLoader",
7 | "LocalLoader",
8 | "GitlabBranchLoader",
9 | "GitlabMRLoader",
10 | )
11 |
--------------------------------------------------------------------------------
/migration_lint/source_loader/base.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import abc
4 | from typing import Any, Sequence, cast, Dict
5 |
6 | from migration_lint.source_loader.model import SourceDiff
7 |
8 |
class SourceLoader(type):
    """Metaclass for source loaders.

    Creating a class with this metaclass registers it (under its ``NAME``
    attribute) in a shared registry, so loaders can be looked up by name.
    """

    # Registry of loader classes, keyed by their NAME attribute.
    source_loaders: Dict[str, SourceLoader] = {}

    def __new__(
        mcls,
        name: str,
        bases: tuple[SourceLoader, ...],
        classdict: Dict[str, Any],
    ) -> SourceLoader:
        new_cls = cast(SourceLoader, super().__new__(mcls, name, bases, classdict))

        # The base class itself (no bases) is not registered.
        if not bases:
            return new_cls

        if not hasattr(new_cls, "NAME"):
            raise NotImplementedError(
                f"source loader {new_cls.__name__} doesn't provide name",
            )

        mcls.source_loaders[new_cls.NAME] = new_cls
        return new_cls

    @classmethod
    def names(mcls) -> Sequence[str]:
        """Get the names of all registered source loaders."""

        return list(mcls.source_loaders)

    @classmethod
    def get(mcls, name: str) -> SourceLoader:
        """Get a registered source loader by its name."""

        return mcls.source_loaders[name]
47 |
48 |
class BaseSourceLoader(metaclass=SourceLoader):
    """Base class for changes source files loader."""

    def __init__(self, only_new_files: bool, **kwargs: Any) -> None:
        # When true, loaders report only newly added files and ignore
        # modifications to existing ones.
        self.only_new_files = only_new_files

    @abc.abstractmethod
    def get_changed_files(self) -> Sequence[SourceDiff]:
        """Return a list of changed files."""

        raise NotImplementedError()
60 |
--------------------------------------------------------------------------------
/migration_lint/source_loader/gitlab.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | from typing import Any, Sequence
4 | from urllib.parse import urljoin
5 | from urllib.request import Request, urlopen
6 |
7 | from migration_lint import logger
8 | from migration_lint.source_loader.base import BaseSourceLoader
9 | from migration_lint.source_loader.model import SourceDiff
10 |
11 |
class GitlabBranchLoader(BaseSourceLoader):
    """A loader to obtain files changed in the given branch comparing to
    master.
    """

    NAME = "gitlab_branch"

    def __init__(
        self,
        branch: str,
        project_id: str,
        gitlab_api_key: str,
        gitlab_instance: str,
        **kwargs: Any,
    ) -> None:
        super().__init__(**kwargs)
        self.base_url = gitlab_instance
        self.branch = branch
        self.project_id = project_id
        self.gitlab_api_key = gitlab_api_key
        self.default_branch = os.environ.get("CI_DEFAULT_BRANCH", "master")

        # All three identifiers are required to talk to the GitLab API.
        if not (self.branch and self.project_id and self.gitlab_api_key):
            raise RuntimeError(
                f"You must specify branch, project_id and gitlab_api_key "
                f"to use GitlabBranchLoader (branch={self.branch}, project_id={self.project_id}, "
                f"gitlab_api_key={self.gitlab_api_key})"
            )

    def get_changed_files(self) -> Sequence[SourceDiff]:
        """Return a list of changed files."""

        logger.info(
            f"### Getting changed files from {self.default_branch} <-> {self.branch}"
        )

        # Compare the default branch against the target branch through the
        # GitLab "repository compare" endpoint.
        query = f"from={self.default_branch}&to={self.branch}"
        endpoint = f"projects/{self.project_id}/repository/compare?{query}"
        url = urljoin(f"{self.base_url}/api/v4/", endpoint)
        request = Request(url, headers={"PRIVATE-TOKEN": self.gitlab_api_key})
        with urlopen(request) as response:
            diffs = json.loads(response.read().decode("utf-8"))["diffs"]

        changed = []
        for entry in diffs:
            if entry["deleted_file"]:
                continue
            if self.only_new_files and not entry["new_file"]:
                continue
            changed.append(
                SourceDiff(
                    diff=entry["diff"],
                    path=entry["new_path"],
                    old_path=entry["old_path"],
                )
            )
        return changed
65 |
66 |
class GitlabMRLoader(BaseSourceLoader):
    """A loader to obtain files changed in the given MR."""

    NAME = "gitlab_mr"

    def __init__(
        self,
        mr_id: str,
        project_id: str,
        gitlab_api_key: str,
        gitlab_instance: str,
        **kwargs: Any,
    ) -> None:
        super().__init__(**kwargs)
        self.base_url = gitlab_instance
        self.mr_id = mr_id
        self.project_id = project_id
        self.gitlab_api_key = gitlab_api_key

        # All three identifiers are required to talk to the GitLab API.
        if not (self.mr_id and self.project_id and self.gitlab_api_key):
            raise RuntimeError(
                f"You must specify mr_id, project_id and gitlab_api_key "
                f"to use GitlabMRLoader (mr_id={self.mr_id}, project_id={self.project_id}, "
                f"gitlab_api_key={self.gitlab_api_key})"
            )

    def _api_get(self, endpoint: str) -> Any:
        """Perform an authenticated GET against the GitLab v4 API."""

        url = urljoin(f"{self.base_url}/api/v4/", endpoint)
        request = Request(url, headers={"PRIVATE-TOKEN": self.gitlab_api_key})
        with urlopen(request) as response:
            return json.loads(response.read().decode("utf-8"))

    def get_changed_files(self) -> Sequence[SourceDiff]:
        """Return a list of changed files."""

        logger.info(f"### Getting changed files from MR: {self.mr_id}")

        mr_path = f"projects/{self.project_id}/merge_requests/{self.mr_id}"
        mr_info = self._api_get(mr_path)
        logger.info(f"MR link: {mr_info['web_url']}")

        diffs = self._api_get(f"{mr_path}/diffs")

        return [
            SourceDiff(
                diff=entry["diff"],
                path=entry["new_path"],
                old_path=entry["old_path"],
            )
            for entry in diffs
            if not entry["deleted_file"]
            and (not self.only_new_files or entry["new_file"])
        ]
122 |
--------------------------------------------------------------------------------
/migration_lint/source_loader/local.py:
--------------------------------------------------------------------------------
1 | import os
2 | from typing import Sequence
3 |
4 | from migration_lint import logger
5 | from migration_lint.source_loader.base import BaseSourceLoader
6 | from migration_lint.source_loader.model import SourceDiff
7 |
8 |
class LocalLoader(BaseSourceLoader):
    """A loader to obtain files changed for local stashed files."""

    NAME = "local_git"

    def get_changed_files(self) -> Sequence[SourceDiff]:
        """Return a list of changed files."""

        # Imported lazily so GitPython is only needed for local runs.
        from git import Repo

        logger.info("### Getting changed files for local stashed files")

        repo = Repo(os.getcwd(), search_parent_directories=True)
        working_tree_diffs = repo.head.commit.diff(None)

        selected = []
        for item in working_tree_diffs:
            if item.deleted_file:
                continue
            if self.only_new_files and not item.new_file:
                continue
            selected.append(item)

        logger.info("Files changed: ")
        logger.info("\n".join(f"- {item.a_path}" for item in selected))

        return [
            SourceDiff(old_path=item.a_path, path=item.b_path) for item in selected
        ]
37 |
--------------------------------------------------------------------------------
/migration_lint/source_loader/model.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import Optional
3 |
4 |
@dataclass
class SourceDiff:
    """An object describing a single changed file.

    - `path` -- the path to the file;
    - `old_path` -- the previous path to the file; differs from `path` if the
      file was renamed;
    - `diff` -- difference between versions (if present).
    """

    path: str
    old_path: str = ""
    diff: Optional[str] = None

    def __post_init__(self):
        # An empty old path means the file was not renamed.
        self.old_path = self.old_path or self.path
22 |
--------------------------------------------------------------------------------
/migration_lint/sql/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PandaDoc/migration-lint/595a5026b3857b9213af2c0259e019525d6a38e0/migration_lint/sql/__init__.py
--------------------------------------------------------------------------------
/migration_lint/sql/constants.py:
--------------------------------------------------------------------------------
1 | import enum
2 |
3 | from migration_lint.util.colors import grey, green, red, yellow
4 |
5 |
class StatementType(str, enum.Enum):
    """Types of migration statements."""

    IGNORED = "ignored"
    BACKWARD_COMPATIBLE = "backward_compatible"
    BACKWARD_INCOMPATIBLE = "backward_incompatible"
    DATA_MIGRATION = "data_migration"
    RESTRICTED = "restricted"
    UNSUPPORTED = "unsupported"

    @property
    def colorized(self):
        """This statement type rendered with its severity colour."""

        # One colour function per type; green is safe, red is dangerous.
        palette = {
            StatementType.IGNORED: grey,
            StatementType.BACKWARD_COMPATIBLE: green,
            StatementType.BACKWARD_INCOMPATIBLE: red,
            StatementType.DATA_MIGRATION: red,
            StatementType.RESTRICTED: yellow,
            StatementType.UNSUPPORTED: red,
        }
        return palette[self](self)
30 |
--------------------------------------------------------------------------------
/migration_lint/sql/model.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import dataclasses
4 | from typing import Optional, List
5 |
6 |
7 | @dataclasses.dataclass
8 | class SegmentLocator:
9 | """A locator object used to find segments.
10 |
11 | - `type` -- string used to match the type of operation (see sqlfluff types);
12 | - `raw` -- string used to match segment by raw SQL;
13 | - `children` -- list of locators used to match descendant segments (on any
14 | level);
15 | - `inverted` --- flag to perform an "inverted" match; works only for
16 | children, so it's actually "segment that doesn't contain specific
17 | descendant".
18 | - `ignore_order` --- by default linter looks for keywords in order,
19 | use this flag to search regardless
20 | - `only_with` --- if statement comes with this statement in one migration, it's safe,
21 | for example if we create table and add foreign key in one migration, it's safe,
22 | if we add foreign key on existing big table - it's not.
23 | """
24 |
25 | type: str
26 | raw: Optional[str] = None
27 | children: Optional[List[SegmentLocator]] = None
28 | inverted: bool = False
29 | ignore_order: bool = False
30 | only_with: Optional[ConditionalMatch] = None
31 |
32 |
33 | @dataclasses.dataclass
34 | class KeywordLocator(SegmentLocator):
35 | """A locator object used to find segments by keyword."""
36 |
37 | type: str = "keyword"
38 |
39 |
40 | @dataclasses.dataclass
41 | class ConditionalMatch:
42 | """
43 | An object to segment by condition
44 | (for example CREATE TABLE and ALTER TABLE for the same table).
45 | See sql/rules.py for examples.
46 |
47 | - `locator` -- what segment to look for.
48 | - `match_by` -- how to match two segments (by table name, column name etc);
49 | """
50 |
51 | locator: SegmentLocator
52 | match_by: SegmentLocator
53 |
--------------------------------------------------------------------------------
/migration_lint/sql/operations.py:
--------------------------------------------------------------------------------
1 | from typing import Optional, List
2 |
3 | from sqlfluff.core.parser import BaseSegment, PositionMarker
4 | from sqlfluff.dialects.dialect_ansi import StatementSegment
5 |
6 | from migration_lint.sql.model import SegmentLocator
7 |
8 |
def find_matching_segment(
    segment: BaseSegment,
    locator: SegmentLocator,
    min_position: Optional[PositionMarker] = None,
    context: Optional[List[StatementSegment]] = None,
) -> Optional[BaseSegment]:
    """Find matching segment by the given locator starting with the given
    position.

    :param segment: segment tree to search in.
    :param locator: description of the segment to match (type, raw text,
        required/forbidden children, optional pairing condition).
    :param min_position: only accept segments at or after this position,
        unless the locator sets ``ignore_order``.
    :param context: other statements of the same migration, used to
        resolve the locator's ``only_with`` pairing condition.
    :return: the first matching segment, or None if nothing matches.
    """

    for found in segment.recursive_crawl(locator.type):
        # Respect ordering: skip candidates before min_position unless the
        # locator explicitly ignores keyword order.
        if (
            not locator.ignore_order
            and min_position
            and found.pos_marker
            and found.pos_marker < min_position
        ):
            continue
        # Raw text comparison is case-insensitive.
        attrs_match = True
        if locator.raw and locator.raw.upper() != found.raw.upper():
            attrs_match = False

        children_match = True
        if locator.children:
            # NOTE(review): this rebinds the *parameter* min_position, and the
            # rebound value persists into later iterations of the outer loop
            # above -- confirm this is intended before relying on position
            # checks across multiple candidate segments.
            min_position = None
            for child_locator in locator.children:
                child_found = find_matching_segment(
                    segment=found,
                    locator=child_locator,
                    min_position=min_position,
                )
                # A child must be present -- or absent when `inverted`.
                if (
                    not child_locator.inverted
                    and not child_found
                    or child_locator.inverted
                    and child_found
                ):
                    children_match = False
                if child_found:
                    # Subsequent children must appear after this one.
                    min_position = child_found.pos_marker

        # unsafe segment still can be safe if paired with specific
        only_with_match = locator.only_with is None
        if locator.only_with and context:
            match_by_original = find_matching_segment(
                segment, locator.only_with.match_by
            )
            for context_statement in context:
                # searching in the same migration
                found_context_segment = find_matching_segment(
                    context_statement, locator.only_with.locator
                )
                if not found_context_segment:
                    continue

                # matching in context
                # (for example checking table that being created
                # is the same as being altered)
                match_by_context = find_matching_segment(
                    found_context_segment, locator.only_with.match_by
                )
                if (
                    match_by_original
                    and match_by_context
                    and match_by_original.raw_normalized()
                    == match_by_context.raw_normalized()
                ):
                    only_with_match = True

        if attrs_match and children_match and only_with_match:
            return found

    return None
82 |
--------------------------------------------------------------------------------
/migration_lint/sql/parser.py:
--------------------------------------------------------------------------------
1 | from typing import Tuple, Sequence, List
2 |
3 | from sqlfluff.api.simple import get_simple_config
4 | from sqlfluff.core import Linter
5 | from sqlfluff.dialects.dialect_ansi import StatementSegment
6 |
7 | from migration_lint.sql.constants import StatementType
8 | from migration_lint.sql.operations import find_matching_segment
9 | from migration_lint.sql.rules import (
10 | BACKWARD_INCOMPATIBLE_OPERATIONS,
11 | BACKWARD_COMPATIBLE_OPERATIONS,
12 | DATA_MIGRATION_OPERATIONS,
13 | RESTRICTED_OPERATIONS,
14 | IGNORED_OPERATIONS,
15 | )
16 |
17 |
def classify_migration(raw_sql: str) -> Sequence[Tuple[str, StatementType]]:
    """Classify migration statements.

    Parses the raw SQL with sqlfluff (postgres dialect) and returns a list
    of ``(normalized_statement, statement_type)`` pairs, skipping
    statements classified as ignored.

    :param raw_sql: SQL text of the whole migration.
    :raises RuntimeError: if the SQL cannot be lexed or parsed.
    """

    linter = Linter(config=get_simple_config(dialect="postgres"))
    result = linter.parse_string(raw_sql)
    parsed = result.root_variant()
    if not parsed or not parsed.tree:
        raise RuntimeError(f"Can't parse SQL from string: {raw_sql}")

    # any() short-circuits instead of materializing every unparsable
    # segment; the redundant `not parsed` re-check is gone (it was
    # unreachable after the raise above).
    if any(parsed.tree.recursive_crawl("unparsable")):
        errors = [
            str(e.description)
            for e in parsed.lexing_violations + parsed.parsing_violations
        ]
        raise RuntimeError(
            f"Can't parse SQL from string: {raw_sql}\n"
            f"Errors: \n" + "\n- ".join(errors)
        )

    # Materialize all statements first: classifying each one needs the full
    # migration as context (e.g. CREATE TABLE paired with a later ALTER).
    statements = list(parsed.tree.recursive_crawl("statement"))

    statements_types = []
    for statement in statements:
        statement_type = classify_statement(statement, context=statements)  # type: ignore
        if statement_type == StatementType.IGNORED:
            continue
        statements_types.append((statement.raw_normalized(), statement_type))

    return statements_types
50 |
51 |
def classify_statement(
    statement: StatementSegment, context: List[StatementSegment]
) -> StatementType:
    """
    Classify an SQL statement using predefined locators.
    :param statement: statement to classify
    :param context: all statements in the same migration
    :return:
    """

    # Rule sets checked in priority order; the first set containing a
    # matching locator decides the statement type.
    rule_sets = (
        (StatementType.IGNORED, IGNORED_OPERATIONS),
        (StatementType.DATA_MIGRATION, DATA_MIGRATION_OPERATIONS),
        (StatementType.BACKWARD_COMPATIBLE, BACKWARD_COMPATIBLE_OPERATIONS),
        (StatementType.BACKWARD_INCOMPATIBLE, BACKWARD_INCOMPATIBLE_OPERATIONS),
        (StatementType.RESTRICTED, RESTRICTED_OPERATIONS),
    )

    for candidate_type, locators in rule_sets:
        for rule in locators:
            if find_matching_segment(
                segment=statement,
                locator=rule,
                context=context,
            ):
                return candidate_type

    return StatementType.UNSUPPORTED
81 |
--------------------------------------------------------------------------------
/migration_lint/sql/rules.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 | from migration_lint.sql.model import ConditionalMatch, KeywordLocator, SegmentLocator
4 |
# Statements classified as backward-incompatible: code deployed against the
# old schema may still depend on the objects these statements drop or change.
BACKWARD_INCOMPATIBLE_OPERATIONS: List[SegmentLocator] = [
    SegmentLocator(type="drop_sequence_statement"),
    # TODO: drop constraints first?
    SegmentLocator(type="drop_table_statement"),
    # ALTER TABLE ... ALTER COLUMN ... DROP DEFAULT
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ALTER"),
            KeywordLocator(raw="COLUMN"),
            KeywordLocator(raw="DROP"),
            KeywordLocator(raw="DEFAULT"),
        ],
    ),
    # ALTER TABLE ... ALTER COLUMN ... SET NOT NULL
    # (the compatible rules allow it when paired with VALIDATE CONSTRAINT).
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ALTER"),
            KeywordLocator(raw="COLUMN"),
            KeywordLocator(raw="SET"),
            KeywordLocator(raw="NOT"),
            KeywordLocator(raw="NULL"),
        ],
    ),
    # ALTER TABLE ... ADD COLUMN ... PRIMARY KEY
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="COLUMN"),
            KeywordLocator(raw="PRIMARY"),
            KeywordLocator(raw="KEY"),
        ],
    ),
    # ALTER TABLE ... ADD CONSTRAINT ... PRIMARY KEY
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="CONSTRAINT"),
            KeywordLocator(raw="PRIMARY"),
            KeywordLocator(raw="KEY"),
        ],
    ),
    # ALTER TABLE ... DROP COLUMN
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="DROP"),
            KeywordLocator(raw="COLUMN"),
        ],
    ),
    # ALTER TABLE ... RENAME COLUMN
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="RENAME"),
            KeywordLocator(raw="COLUMN"),
        ],
    ),
    SegmentLocator(type="truncate_table"),
    SegmentLocator(type="drop_type_statement"),
]
63 |
# Operations classified as backward compatible: the matching statements are
# considered safe to apply while the previously deployed application version
# is still running.
BACKWARD_COMPATIBLE_OPERATIONS: List[SegmentLocator] = [
    SegmentLocator(
        type="create_index_statement", children=[KeywordLocator(raw="CONCURRENTLY")]
    ),
    SegmentLocator(
        type="drop_index_statement", children=[KeywordLocator(raw="CONCURRENTLY")]
    ),
    SegmentLocator(
        type="alter_index_statement", children=[KeywordLocator(raw="RENAME")]
    ),
    SegmentLocator(
        type="reindex_statement_segment", children=[KeywordLocator(raw="CONCURRENTLY")]
    ),
    SegmentLocator(type="create_sequence_statement"),
    SegmentLocator(type="alter_sequence_statement"),
    SegmentLocator(type="create_table_statement"),
    # ADD COLUMN ... NULL without a preceding NOT (nullable column)
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="COLUMN"),
            KeywordLocator(raw="NOT", inverted=True),
            KeywordLocator(raw="NULL"),
        ],
    ),
    # ALTER COLUMN ... DROP NOT NULL
    # NOTE(review): this locator previously appeared twice in the list
    # (a second identical entry sat further down); the duplicate was removed
    # without affecting which statements match.
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ALTER"),
            KeywordLocator(raw="COLUMN"),
            KeywordLocator(raw="DROP"),
            KeywordLocator(raw="NOT"),
            KeywordLocator(raw="NULL"),
        ],
    ),
    # it's ok to do "SET NOT NULL" only after explicit "VALIDATE CONSTRAINT"
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ALTER"),
            KeywordLocator(raw="COLUMN"),
            KeywordLocator(raw="SET"),
            KeywordLocator(raw="NOT"),
            KeywordLocator(raw="NULL"),
        ],
        only_with=ConditionalMatch(
            SegmentLocator(
                type="alter_table_statement",
                children=[
                    KeywordLocator(raw="VALIDATE"),
                    KeywordLocator(raw="CONSTRAINT"),
                ],
            ),
            match_by=SegmentLocator(type="table_reference"),
        ),
    ),
    # ADD COLUMN ... NOT NULL is allowed when a DEFAULT is supplied anywhere
    # in the statement (ignore_order=True)
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="COLUMN"),
            KeywordLocator(raw="NOT"),
            KeywordLocator(raw="NULL"),
            KeywordLocator(raw="DEFAULT", ignore_order=True),
        ],
    ),
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ALTER"),
            KeywordLocator(raw="COLUMN"),
            KeywordLocator(raw="SET"),
            KeywordLocator(raw="DEFAULT"),
        ],
    ),
    # ADD CONSTRAINT ... UNIQUE USING INDEX
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="CONSTRAINT"),
            KeywordLocator(raw="UNIQUE"),
            KeywordLocator(raw="USING"),
            KeywordLocator(raw="INDEX"),
        ],
    ),
    # ADD CONSTRAINT is fine when the same changeset also creates the table
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="CONSTRAINT"),
        ],
        only_with=ConditionalMatch(
            locator=SegmentLocator(type="create_table_statement"),
            match_by=SegmentLocator(type="table_reference"),
        ),
    ),
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="DROP"),
            KeywordLocator(raw="CONSTRAINT"),
        ],
    ),
    # ADD CONSTRAINT ... NOT VALID
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="CONSTRAINT"),
            KeywordLocator(raw="NOT"),
            KeywordLocator(raw="VALID"),
        ],
    ),
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="VALIDATE"),
            KeywordLocator(raw="CONSTRAINT"),
        ],
    ),
    # ADD FOREIGN KEY ... NOT VALID
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="FOREIGN"),
            KeywordLocator(raw="KEY"),
            KeywordLocator(raw="NOT"),
            KeywordLocator(raw="VALID"),
        ],
    ),
    # ADD FOREIGN KEY is fine when the same changeset also creates the table
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="FOREIGN"),
            KeywordLocator(raw="KEY"),
        ],
        only_with=ConditionalMatch(
            locator=SegmentLocator(type="create_table_statement"),
            match_by=SegmentLocator(type="table_reference"),
        ),
    ),
    # change type to text is always safe
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ALTER"),
            KeywordLocator(raw="COLUMN"),
            KeywordLocator(raw="TYPE"),
            SegmentLocator(type="data_type", raw="TEXT"),
        ],
    ),
    # basic ADD COLUMN is default to NULL, so it's safe
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="COLUMN"),
            KeywordLocator(raw="NOT", inverted=True),
            KeywordLocator(raw="NULL", inverted=True),
            KeywordLocator(raw="PRIMARY", inverted=True),
            KeywordLocator(raw="KEY", inverted=True),
            KeywordLocator(raw="IDENTITY", inverted=True),
        ],
    ),
    # ALTER COLUMN ... ADD GENERATED ... IDENTITY
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ALTER"),
            KeywordLocator(raw="COLUMN"),
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="GENERATED"),
            KeywordLocator(raw="IDENTITY"),
        ],
    ),
    SegmentLocator(type="create_statistics_statement"),
    SegmentLocator(type="analyze_statement"),
    SegmentLocator(type="reset_statement"),
    SegmentLocator(type="create_type_statement"),
    # ALTER TYPE ... ADD VALUE (extending an enum type)
    SegmentLocator(
        type="alter_type_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="VALUE"),
        ],
    ),
    # alembic's own bookkeeping writes to its version table
    SegmentLocator(
        type="insert_statement",
        children=[SegmentLocator(type="table_reference", raw="alembic_version")],
    ),
    SegmentLocator(
        type="update_statement",
        children=[SegmentLocator(type="table_reference", raw="alembic_version")],
    ),
    SegmentLocator(type="create_function_statement"),
    SegmentLocator(type="drop_function_statement"),
    SegmentLocator(type="create_trigger"),
    SegmentLocator(type="drop_trigger"),
]
272 |
# Operations classified as restricted.
# NOTE(review): by contrast with BACKWARD_COMPATIBLE_OPERATIONS above, the
# ``inverted=True`` child locators here appear to match statements that LACK
# the given keyword (e.g. CREATE INDEX without CONCURRENTLY) — confirm
# against KeywordLocator's implementation.
RESTRICTED_OPERATIONS: List[SegmentLocator] = [
    # CREATE INDEX without CONCURRENTLY
    SegmentLocator(
        type="create_index_statement",
        children=[KeywordLocator(raw="CONCURRENTLY", inverted=True)],
    ),
    # DROP INDEX without CONCURRENTLY
    SegmentLocator(
        type="drop_index_statement",
        children=[KeywordLocator(raw="CONCURRENTLY", inverted=True)],
    ),
    # REINDEX without CONCURRENTLY
    SegmentLocator(
        type="reindex_statement_segment",
        children=[KeywordLocator(raw="CONCURRENTLY", inverted=True)],
    ),
    # ADD CONSTRAINT ... UNIQUE without USING INDEX
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="CONSTRAINT"),
            KeywordLocator(raw="UNIQUE"),
            KeywordLocator(raw="USING", inverted=True),
            KeywordLocator(raw="INDEX", inverted=True),
        ],
    ),
    # RENAME that is not a RENAME COLUMN (i.e. renaming the table itself)
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="RENAME"),
            KeywordLocator(raw="COLUMN", inverted=True),
        ],
    ),
    # ALTER COLUMN ... TYPE <anything other than TEXT>
    # (the TEXT case is whitelisted in BACKWARD_COMPATIBLE_OPERATIONS)
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ALTER"),
            KeywordLocator(raw="COLUMN"),
            KeywordLocator(raw="TYPE"),
            SegmentLocator(type="data_type", raw="TEXT", inverted=True),
        ],
    ),
    # ADD CONSTRAINT without NOT VALID and without USING INDEX
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="CONSTRAINT"),
            KeywordLocator(raw="NOT", inverted=True),
            KeywordLocator(raw="VALID", inverted=True),
            KeywordLocator(raw="USING", inverted=True),
            KeywordLocator(raw="INDEX", inverted=True),
        ],
    ),
    # ADD FOREIGN KEY without NOT VALID
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="FOREIGN"),
            KeywordLocator(raw="KEY"),
            KeywordLocator(raw="NOT", inverted=True),
            KeywordLocator(raw="VALID", inverted=True),
        ],
    ),
    # ADD COLUMN ... NOT NULL without a DEFAULT anywhere in the statement
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="COLUMN"),
            KeywordLocator(raw="NOT"),
            KeywordLocator(raw="NULL"),
            KeywordLocator(raw="DEFAULT", inverted=True, ignore_order=True),
        ],
    ),
    # ADD COLUMN ... PRIMARY KEY
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="COLUMN"),
            KeywordLocator(raw="PRIMARY"),
            KeywordLocator(raw="KEY"),
        ],
    ),
    # ADD COLUMN ... GENERATED ... IDENTITY
    SegmentLocator(
        type="alter_table_statement",
        children=[
            KeywordLocator(raw="ADD"),
            KeywordLocator(raw="COLUMN"),
            KeywordLocator(raw="GENERATED"),
            KeywordLocator(raw="IDENTITY"),
        ],
    ),
]
362 |
# Statement types treated as data migrations (DML rather than DDL).
DATA_MIGRATION_OPERATIONS = [
    SegmentLocator(type=statement_type)
    for statement_type in (
        "update_statement",
        "insert_statement",
        "delete_statement",
    )
]
368 |
# Statements the classifier skips entirely: plain reads, session settings,
# and the transaction control statements emitted around migrations.
IGNORED_OPERATIONS = [
    SegmentLocator(type="select_statement"),
    SegmentLocator(type="set_statement"),
] + [
    SegmentLocator(type="transaction_statement", children=[KeywordLocator(raw=keyword)])
    for keyword in ("BEGIN", "END", "COMMIT")
]
380 |
--------------------------------------------------------------------------------
/migration_lint/util/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PandaDoc/migration-lint/595a5026b3857b9213af2c0259e019525d6a38e0/migration_lint/util/__init__.py
--------------------------------------------------------------------------------
/migration_lint/util/colors.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 |
3 |
class ColorCode(Enum):
    """ANSI escape sequences used to color terminal output.

    Each member's value is the escape sequence that starts the color;
    ``reset`` restores the terminal's default rendering.
    """

    blue = "\x1b[1;34m"
    grey = "\x1b[38;21m"
    green = "\x1b[1;32m"
    red = "\x1b[31;21m"
    yellow = "\x1b[33;21m"
    reset = "\x1b[0m"  # SGR reset: clears all color/attribute state
11 |
12 |
def colorize(color: ColorCode, msg: str) -> str:
    """Return ``msg`` wrapped in the given color's escape sequence.

    The reset sequence is appended so subsequent output is uncolored.
    """
    start = color.value
    end = ColorCode.reset.value
    return start + msg + end
15 |
16 |
def grey(msg: str) -> str:
    """Shortcut: render ``msg`` in grey."""
    return colorize(color=ColorCode.grey, msg=msg)
19 |
20 |
def green(msg: str) -> str:
    """Shortcut: render ``msg`` in green."""
    return colorize(color=ColorCode.green, msg=msg)
23 |
24 |
def yellow(msg: str) -> str:
    """Shortcut: render ``msg`` in yellow."""
    return colorize(color=ColorCode.yellow, msg=msg)
27 |
28 |
def red(msg: str) -> str:
    """Shortcut: render ``msg`` in red."""
    return colorize(color=ColorCode.red, msg=msg)
31 |
32 |
def blue(msg: str) -> str:
    """Shortcut: render ``msg`` in blue."""
    return colorize(color=ColorCode.blue, msg=msg)
35 |
--------------------------------------------------------------------------------
/migration_lint/util/env.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 |
def get_bool_env(var_name: str, default: bool) -> bool:
    """Read a boolean flag from an environment variable.

    The value is compared case-insensitively, ignoring surrounding
    whitespace, against common truthy ("true", "1", "yes") and falsy
    ("false", "0", "no") spellings.  When the variable is unset, ``default``
    is used (``str(default)`` is "True"/"False", which round-trips to the
    same boolean).

    :param var_name: name of the environment variable to read.
    :param default: value returned when the variable is not set.
    :raises RuntimeError: if the value is not a recognized boolean spelling.
    """
    # strip() makes values like " true" (e.g. from a quoted shell export)
    # parse instead of raising.
    str_value = os.getenv(var_name, str(default)).strip().lower()
    if str_value in ("true", "1", "yes"):
        return True
    elif str_value in ("false", "0", "no"):
        return False
    raise RuntimeError(f"Invalid boolean value for env {var_name}={str_value}")
11 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: packages/migration-lint
2 | site_url: https://pandadoc.github.io/migration-lint/
3 |
4 | repo_name: pandadoc/migration-lint
5 | repo_url: https://github.com/pandadoc/migration-lint
6 |
7 | theme:
8 | name: material
9 | custom_dir: docs-theme
10 | icon:
11 | repo: fontawesome/brands/github
12 | palette:
13 | # Palette toggle for automatic mode
14 | - media: "(prefers-color-scheme)"
15 | toggle:
16 | icon: material/brightness-auto
17 | name: Switch to light mode
18 | # Palette toggle for light mode
19 | - media: "(prefers-color-scheme: light)"
20 | scheme: default
21 | toggle:
22 | icon: material/brightness-7
23 | name: Switch to dark mode
24 | # Palette toggle for dark mode
25 | - media: "(prefers-color-scheme: dark)"
26 | scheme: slate
27 | toggle:
28 | icon: material/brightness-4
29 | name: Switch to system preference
30 | features:
31 | - content.code.annotate
32 | - content.code.copy
33 | - content.code.select
34 | - navigation.indexes
35 | - navigation.instant
36 | - navigation.expand
37 | - navigation.top
38 | - navigation.footer
39 | - navigation.tabs
40 | - navigation.tabs.sticky
41 | - navigation.tracking
42 | - search.suggest
43 | - search.highlight
44 | - toc.follow
45 | - toc.integrate
46 |
47 | extra:
48 | social:
49 | - icon: fontawesome/solid/paper-plane
50 | link: mailto:migration-lint-team@pandadoc.com
51 | version:
52 | provider: mike
53 |
54 | markdown_extensions:
55 | - admonition
56 | - attr_list
57 | - toc:
58 | permalink: true
59 | - tables
60 | - pymdownx.emoji:
61 | emoji_index: !!python/name:material.extensions.emoji.twemoji
62 | emoji_generator: !!python/name:material.extensions.emoji.to_svg
63 | - pymdownx.highlight:
64 | linenums: true
65 | linenums_style: pymdownx-inline
66 | - pymdownx.inlinehilite
67 | - pymdownx.details
68 | - pymdownx.superfences:
69 | custom_fences:
70 | - name: mermaid
71 | class: mermaid
72 | format: !!python/name:pymdownx.superfences.fence_code_format
73 | - pymdownx.snippets
74 |
75 | plugins:
76 | - search
77 | - tags:
78 | tags_file: tags.md
79 |
80 | nav:
81 | - Welcome: index.md
82 | - Rules API: rules.md
83 | - Migration classification: classification.md
84 |
--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
1 | [[package]]
2 | name = "appdirs"
3 | version = "1.4.4"
4 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
5 | category = "main"
6 | optional = false
7 | python-versions = "*"
8 |
9 | [[package]]
10 | name = "asgiref"
11 | version = "3.8.1"
12 | description = "ASGI specs, helper code, and adapters"
13 | category = "main"
14 | optional = false
15 | python-versions = ">=3.8"
16 |
17 | [package.dependencies]
18 | typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""}
19 |
20 | [package.extras]
21 | tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]
22 |
23 | [[package]]
24 | name = "chardet"
25 | version = "5.2.0"
26 | description = "Universal encoding detector for Python 3"
27 | category = "main"
28 | optional = false
29 | python-versions = ">=3.7"
30 |
31 | [[package]]
32 | name = "click"
33 | version = "8.1.7"
34 | description = "Composable command line interface toolkit"
35 | category = "main"
36 | optional = false
37 | python-versions = ">=3.7"
38 |
39 | [package.dependencies]
40 | colorama = {version = "*", markers = "platform_system == \"Windows\""}
41 |
42 | [[package]]
43 | name = "colorama"
44 | version = "0.4.6"
45 | description = "Cross-platform colored terminal text."
46 | category = "main"
47 | optional = false
48 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
49 |
50 | [[package]]
51 | name = "coverage"
52 | version = "7.6.1"
53 | description = "Code coverage measurement for Python"
54 | category = "dev"
55 | optional = false
56 | python-versions = ">=3.8"
57 |
58 | [package.dependencies]
59 | tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
60 |
61 | [package.extras]
62 | toml = ["tomli"]
63 |
64 | [[package]]
65 | name = "diff-cover"
66 | version = "7.7.0"
67 | description = "Run coverage and linting reports on diffs"
68 | category = "main"
69 | optional = false
70 | python-versions = ">=3.7.2,<4.0.0"
71 |
72 | [package.dependencies]
73 | chardet = ">=3.0.0"
74 | Jinja2 = ">=2.7.1"
75 | pluggy = ">=0.13.1,<2"
76 | Pygments = ">=2.9.0,<3.0.0"
77 |
78 | [package.extras]
79 | toml = ["tomli (>=1.2.1)"]
80 |
81 | [[package]]
82 | name = "django"
83 | version = "4.2.16"
84 | description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
85 | category = "main"
86 | optional = false
87 | python-versions = ">=3.8"
88 |
89 | [package.dependencies]
90 | asgiref = ">=3.6.0,<4"
91 | sqlparse = ">=0.3.1"
92 | tzdata = {version = "*", markers = "sys_platform == \"win32\""}
93 |
94 | [package.extras]
95 | argon2 = ["argon2-cffi (>=19.1.0)"]
96 | bcrypt = ["bcrypt"]
97 |
98 | [[package]]
99 | name = "django-stubs"
100 | version = "5.1.0"
101 | description = "Mypy stubs for Django"
102 | category = "dev"
103 | optional = false
104 | python-versions = ">=3.8"
105 |
106 | [package.dependencies]
107 | asgiref = "*"
108 | django = "*"
109 | django-stubs-ext = ">=5.1.0"
110 | tomli = {version = "*", markers = "python_version < \"3.11\""}
111 | types-PyYAML = "*"
112 | typing-extensions = ">=4.11.0"
113 |
114 | [package.extras]
115 | compatible-mypy = ["mypy (>=1.11.0,<1.12.0)"]
116 | oracle = ["oracledb"]
117 | redis = ["redis"]
118 |
119 | [[package]]
120 | name = "django-stubs-ext"
121 | version = "5.1.0"
122 | description = "Monkey-patching and extensions for django-stubs"
123 | category = "dev"
124 | optional = false
125 | python-versions = ">=3.8"
126 |
127 | [package.dependencies]
128 | django = "*"
129 | typing-extensions = "*"
130 |
131 | [[package]]
132 | name = "exceptiongroup"
133 | version = "1.2.2"
134 | description = "Backport of PEP 654 (exception groups)"
135 | category = "main"
136 | optional = false
137 | python-versions = ">=3.7"
138 |
139 | [package.extras]
140 | test = ["pytest (>=6)"]
141 |
142 | [[package]]
143 | name = "gitdb"
144 | version = "4.0.11"
145 | description = "Git Object Database"
146 | category = "main"
147 | optional = true
148 | python-versions = ">=3.7"
149 |
150 | [package.dependencies]
151 | smmap = ">=3.0.1,<6"
152 |
153 | [[package]]
154 | name = "gitpython"
155 | version = "3.1.43"
156 | description = "GitPython is a Python library used to interact with Git repositories"
157 | category = "main"
158 | optional = true
159 | python-versions = ">=3.7"
160 |
161 | [package.dependencies]
162 | gitdb = ">=4.0.1,<5"
163 |
164 | [package.extras]
165 | doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"]
166 | test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"]
167 |
168 | [[package]]
169 | name = "iniconfig"
170 | version = "2.0.0"
171 | description = "brain-dead simple config-ini parsing"
172 | category = "main"
173 | optional = false
174 | python-versions = ">=3.7"
175 |
176 | [[package]]
177 | name = "jinja2"
178 | version = "3.1.4"
179 | description = "A very fast and expressive template engine."
180 | category = "main"
181 | optional = false
182 | python-versions = ">=3.7"
183 |
184 | [package.dependencies]
185 | MarkupSafe = ">=2.0"
186 |
187 | [package.extras]
188 | i18n = ["Babel (>=2.7)"]
189 |
190 | [[package]]
191 | name = "markupsafe"
192 | version = "2.1.5"
193 | description = "Safely add untrusted strings to HTML/XML markup."
194 | category = "main"
195 | optional = false
196 | python-versions = ">=3.7"
197 |
198 | [[package]]
199 | name = "mypy"
200 | version = "1.13.0"
201 | description = "Optional static typing for Python"
202 | category = "dev"
203 | optional = false
204 | python-versions = ">=3.8"
205 |
206 | [package.dependencies]
207 | mypy-extensions = ">=1.0.0"
208 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
209 | typing-extensions = ">=4.6.0"
210 |
211 | [package.extras]
212 | dmypy = ["psutil (>=4.0)"]
213 | faster-cache = ["orjson"]
214 | install-types = ["pip"]
215 | mypyc = ["setuptools (>=50)"]
216 | reports = ["lxml"]
217 |
218 | [[package]]
219 | name = "mypy-extensions"
220 | version = "1.0.0"
221 | description = "Type system extensions for programs checked with the mypy type checker."
222 | category = "dev"
223 | optional = false
224 | python-versions = ">=3.5"
225 |
226 | [[package]]
227 | name = "packaging"
228 | version = "24.1"
229 | description = "Core utilities for Python packages"
230 | category = "main"
231 | optional = false
232 | python-versions = ">=3.8"
233 |
234 | [[package]]
235 | name = "pathspec"
236 | version = "0.12.1"
237 | description = "Utility library for gitignore style pattern matching of file paths."
238 | category = "main"
239 | optional = false
240 | python-versions = ">=3.8"
241 |
242 | [[package]]
243 | name = "pluggy"
244 | version = "1.5.0"
245 | description = "plugin and hook calling mechanisms for python"
246 | category = "main"
247 | optional = false
248 | python-versions = ">=3.8"
249 |
250 | [package.extras]
251 | dev = ["pre-commit", "tox"]
252 | testing = ["pytest", "pytest-benchmark"]
253 |
254 | [[package]]
255 | name = "pygments"
256 | version = "2.18.0"
257 | description = "Pygments is a syntax highlighting package written in Python."
258 | category = "main"
259 | optional = false
260 | python-versions = ">=3.8"
261 |
262 | [package.extras]
263 | windows-terminal = ["colorama (>=0.4.6)"]
264 |
265 | [[package]]
266 | name = "pytest"
267 | version = "8.3.3"
268 | description = "pytest: simple powerful testing with Python"
269 | category = "main"
270 | optional = false
271 | python-versions = ">=3.8"
272 |
273 | [package.dependencies]
274 | colorama = {version = "*", markers = "sys_platform == \"win32\""}
275 | exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
276 | iniconfig = "*"
277 | packaging = "*"
278 | pluggy = ">=1.5,<2"
279 | tomli = {version = ">=1", markers = "python_version < \"3.11\""}
280 |
281 | [package.extras]
282 | dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
283 |
284 | [[package]]
285 | name = "pytest-cov"
286 | version = "4.1.0"
287 | description = "Pytest plugin for measuring coverage."
288 | category = "dev"
289 | optional = false
290 | python-versions = ">=3.7"
291 |
292 | [package.dependencies]
293 | coverage = {version = ">=5.2.1", extras = ["toml"]}
294 | pytest = ">=4.6"
295 |
296 | [package.extras]
297 | testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
298 |
299 | [[package]]
300 | name = "pyyaml"
301 | version = "6.0.2"
302 | description = "YAML parser and emitter for Python"
303 | category = "main"
304 | optional = false
305 | python-versions = ">=3.8"
306 |
307 | [[package]]
308 | name = "regex"
309 | version = "2024.9.11"
310 | description = "Alternative regular expression module, to replace re."
311 | category = "main"
312 | optional = false
313 | python-versions = ">=3.8"
314 |
315 | [[package]]
316 | name = "ruff"
317 | version = "0.7.0"
318 | description = "An extremely fast Python linter and code formatter, written in Rust."
319 | category = "dev"
320 | optional = false
321 | python-versions = ">=3.7"
322 |
323 | [[package]]
324 | name = "smmap"
325 | version = "5.0.1"
326 | description = "A pure Python implementation of a sliding window memory map manager"
327 | category = "main"
328 | optional = true
329 | python-versions = ">=3.7"
330 |
331 | [[package]]
332 | name = "sqlfluff"
333 | version = "3.2.4"
334 | description = "The SQL Linter for Humans"
335 | category = "main"
336 | optional = false
337 | python-versions = ">=3.8"
338 |
339 | [package.dependencies]
340 | appdirs = "*"
341 | chardet = "*"
342 | click = "*"
343 | colorama = ">=0.3"
344 | diff-cover = ">=2.5.0"
345 | Jinja2 = "*"
346 | pathspec = "*"
347 | pytest = "*"
348 | pyyaml = ">=5.1"
349 | regex = "*"
350 | tblib = "*"
351 | toml = {version = "*", markers = "python_version < \"3.11\""}
352 | tqdm = "*"
353 |
354 | [[package]]
355 | name = "sqlparse"
356 | version = "0.5.1"
357 | description = "A non-validating SQL parser."
358 | category = "main"
359 | optional = false
360 | python-versions = ">=3.8"
361 |
362 | [package.extras]
363 | dev = ["build", "hatch"]
364 | doc = ["sphinx"]
365 |
366 | [[package]]
367 | name = "tblib"
368 | version = "3.0.0"
369 | description = "Traceback serialization library."
370 | category = "main"
371 | optional = false
372 | python-versions = ">=3.8"
373 |
374 | [[package]]
375 | name = "toml"
376 | version = "0.10.2"
377 | description = "Python Library for Tom's Obvious, Minimal Language"
378 | category = "main"
379 | optional = false
380 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
381 |
382 | [[package]]
383 | name = "tomli"
384 | version = "2.0.2"
385 | description = "A lil' TOML parser"
386 | category = "main"
387 | optional = false
388 | python-versions = ">=3.8"
389 |
390 | [[package]]
391 | name = "tqdm"
392 | version = "4.66.5"
393 | description = "Fast, Extensible Progress Meter"
394 | category = "main"
395 | optional = false
396 | python-versions = ">=3.7"
397 |
398 | [package.dependencies]
399 | colorama = {version = "*", markers = "platform_system == \"Windows\""}
400 |
401 | [package.extras]
402 | dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"]
403 | notebook = ["ipywidgets (>=6)"]
404 | slack = ["slack-sdk"]
405 | telegram = ["requests"]
406 |
407 | [[package]]
408 | name = "types-pyyaml"
409 | version = "6.0.12.20240917"
410 | description = "Typing stubs for PyYAML"
411 | category = "dev"
412 | optional = false
413 | python-versions = ">=3.8"
414 |
415 | [[package]]
416 | name = "typing-extensions"
417 | version = "4.12.2"
418 | description = "Backported and Experimental Type Hints for Python 3.8+"
419 | category = "main"
420 | optional = false
421 | python-versions = ">=3.8"
422 |
423 | [[package]]
424 | name = "tzdata"
425 | version = "2024.2"
426 | description = "Provider of IANA time zone data"
427 | category = "main"
428 | optional = false
429 | python-versions = ">=2"
430 |
431 | [extras]
432 | django = ["django"]
433 | git = ["gitpython"]
434 |
435 | [metadata]
436 | lock-version = "1.1"
437 | python-versions = "^3.9"
438 | content-hash = "476530d3e8dea732858d54063e4853594d67e0c29966b569bb18320cc600c77c"
439 |
440 | [metadata.files]
441 | appdirs = [
442 | {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
443 | {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
444 | ]
445 | asgiref = [
446 | {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"},
447 | {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"},
448 | ]
449 | chardet = [
450 | {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"},
451 | {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"},
452 | ]
453 | click = [
454 | {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
455 | {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
456 | ]
457 | colorama = [
458 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
459 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
460 | ]
461 | coverage = [
462 | {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"},
463 | {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"},
464 | {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"},
465 | {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"},
466 | {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"},
467 | {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"},
468 | {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"},
469 | {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"},
470 | {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"},
471 | {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"},
472 | {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"},
473 | {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"},
474 | {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"},
475 | {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"},
476 | {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"},
477 | {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"},
478 | {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"},
479 | {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"},
480 | {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"},
481 | {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"},
482 | {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"},
483 | {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"},
484 | {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"},
485 | {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"},
486 | {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"},
487 | {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"},
488 | {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"},
489 | {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"},
490 | {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"},
491 | {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"},
492 | {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"},
493 | {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"},
494 | {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"},
495 | {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"},
496 | {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"},
497 | {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"},
498 | {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"},
499 | {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"},
500 | {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"},
501 | {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"},
502 | {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"},
503 | {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"},
504 | {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"},
505 | {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"},
506 | {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"},
507 | {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"},
508 | {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"},
509 | {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"},
510 | {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"},
511 | {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"},
512 | {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"},
513 | {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"},
514 | {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"},
515 | {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"},
516 | {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"},
517 | {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"},
518 | {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"},
519 | {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"},
520 | {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"},
521 | {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"},
522 | {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"},
523 | {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"},
524 | {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"},
525 | {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"},
526 | {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"},
527 | {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"},
528 | {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"},
529 | {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"},
530 | {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"},
531 | {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"},
532 | {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"},
533 | {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"},
534 | ]
535 | diff-cover = [
536 | {file = "diff_cover-7.7.0-py3-none-any.whl", hash = "sha256:bf86f32ec999f9a9e79bf24969f7127ea7b4e55c3ef3cd9300feb13188c89736"},
537 | {file = "diff_cover-7.7.0.tar.gz", hash = "sha256:60614cf7e722cf7fb1bde497afac0b514294e1e26534449622dac4da296123fb"},
538 | ]
539 | django = [
540 | {file = "Django-4.2.16-py3-none-any.whl", hash = "sha256:1ddc333a16fc139fd253035a1606bb24261951bbc3a6ca256717fa06cc41a898"},
541 | {file = "Django-4.2.16.tar.gz", hash = "sha256:6f1616c2786c408ce86ab7e10f792b8f15742f7b7b7460243929cb371e7f1dad"},
542 | ]
543 | django-stubs = [
544 | {file = "django_stubs-5.1.0-py3-none-any.whl", hash = "sha256:b98d49a80aa4adf1433a97407102d068de26c739c405431d93faad96dd282c40"},
545 | {file = "django_stubs-5.1.0.tar.gz", hash = "sha256:86128c228b65e6c9a85e5dc56eb1c6f41125917dae0e21e6cfecdf1b27e630c5"},
546 | ]
547 | django-stubs-ext = [
548 | {file = "django_stubs_ext-5.1.0-py3-none-any.whl", hash = "sha256:a455fc222c90b30b29ad8c53319559f5b54a99b4197205ddbb385aede03b395d"},
549 | {file = "django_stubs_ext-5.1.0.tar.gz", hash = "sha256:ed7d51c0b731651879fc75f331fb0806d98b67bfab464e96e2724db6b46ef926"},
550 | ]
551 | exceptiongroup = [
552 | {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
553 | {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
554 | ]
555 | gitdb = [
556 | {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"},
557 | {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"},
558 | ]
559 | gitpython = [
560 | {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"},
561 | {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"},
562 | ]
563 | iniconfig = [
564 | {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
565 | {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
566 | ]
567 | jinja2 = [
568 | {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
569 | {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
570 | ]
571 | markupsafe = [
572 | {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
573 | {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
574 | {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
575 | {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
576 | {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
577 | {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
578 | {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
579 | {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
580 | {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
581 | {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
582 | {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
583 | {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
584 | {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
585 | {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
586 | {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
587 | {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
588 | {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
589 | {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
590 | {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
591 | {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
592 | {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
593 | {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
594 | {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
595 | {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
596 | {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
597 | {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
598 | {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
599 | {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
600 | {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
601 | {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
602 | {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
603 | {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
604 | {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
605 | {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
606 | {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
607 | {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
608 | {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
609 | {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
610 | {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
611 | {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
612 | {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
613 | {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
614 | {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
615 | {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
616 | {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
617 | {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
618 | {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
619 | {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
620 | {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
621 | {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
622 | {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
623 | {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
624 | {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
625 | {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
626 | {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
627 | {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
628 | {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
629 | {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
630 | {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
631 | {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
632 | ]
633 | mypy = [
634 | {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"},
635 | {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"},
636 | {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"},
637 | {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"},
638 | {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"},
639 | {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"},
640 | {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"},
641 | {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"},
642 | {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"},
643 | {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"},
644 | {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"},
645 | {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"},
646 | {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"},
647 | {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"},
648 | {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"},
649 | {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"},
650 | {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"},
651 | {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"},
652 | {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"},
653 | {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"},
654 | {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"},
655 | {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"},
656 | {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"},
657 | {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"},
658 | {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"},
659 | {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"},
660 | {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"},
661 | {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"},
662 | {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"},
663 | {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"},
664 | {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"},
665 | {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"},
666 | ]
667 | mypy-extensions = [
668 | {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
669 | {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
670 | ]
671 | packaging = [
672 | {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
673 | {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
674 | ]
675 | pathspec = [
676 | {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
677 | {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
678 | ]
679 | pluggy = [
680 | {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
681 | {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
682 | ]
683 | pygments = [
684 | {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
685 | {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
686 | ]
687 | pytest = [
688 | {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"},
689 | {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"},
690 | ]
691 | pytest-cov = [
692 | {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
693 | {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
694 | ]
695 | pyyaml = [
696 | {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
697 | {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
698 | {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
699 | {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
700 | {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
701 | {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
702 | {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
703 | {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
704 | {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
705 | {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
706 | {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
707 | {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
708 | {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
709 | {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
710 | {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
711 | {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
712 | {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
713 | {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
714 | {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
715 | {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
716 | {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
717 | {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
718 | {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
719 | {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
720 | {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
721 | {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
722 | {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
723 | {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
724 | {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
725 | {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
726 | {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
727 | {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
728 | {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
729 | {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
730 | {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
731 | {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
732 | {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
733 | {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
734 | {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
735 | {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
736 | {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
737 | {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
738 | {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
739 | {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
740 | {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
741 | {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
742 | {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
743 | {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
744 | {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
745 | {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
746 | {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
747 | {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
748 | {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
749 | ]
750 | regex = [
751 | {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1494fa8725c285a81d01dc8c06b55287a1ee5e0e382d8413adc0a9197aac6408"},
752 | {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e12c481ad92d129c78f13a2a3662317e46ee7ef96c94fd332e1c29131875b7d"},
753 | {file = "regex-2024.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16e13a7929791ac1216afde26f712802e3df7bf0360b32e4914dca3ab8baeea5"},
754 | {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46989629904bad940bbec2106528140a218b4a36bb3042d8406980be1941429c"},
755 | {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a906ed5e47a0ce5f04b2c981af1c9acf9e8696066900bf03b9d7879a6f679fc8"},
756 | {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a091b0550b3b0207784a7d6d0f1a00d1d1c8a11699c1a4d93db3fbefc3ad35"},
757 | {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ddcd9a179c0a6fa8add279a4444015acddcd7f232a49071ae57fa6e278f1f71"},
758 | {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b41e1adc61fa347662b09398e31ad446afadff932a24807d3ceb955ed865cc8"},
759 | {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ced479f601cd2f8ca1fd7b23925a7e0ad512a56d6e9476f79b8f381d9d37090a"},
760 | {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:635a1d96665f84b292e401c3d62775851aedc31d4f8784117b3c68c4fcd4118d"},
761 | {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c0256beda696edcf7d97ef16b2a33a8e5a875affd6fa6567b54f7c577b30a137"},
762 | {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ce4f1185db3fbde8ed8aa223fc9620f276c58de8b0d4f8cc86fd1360829edb6"},
763 | {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:09d77559e80dcc9d24570da3745ab859a9cf91953062e4ab126ba9d5993688ca"},
764 | {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a22ccefd4db3f12b526eccb129390942fe874a3a9fdbdd24cf55773a1faab1a"},
765 | {file = "regex-2024.9.11-cp310-cp310-win32.whl", hash = "sha256:f745ec09bc1b0bd15cfc73df6fa4f726dcc26bb16c23a03f9e3367d357eeedd0"},
766 | {file = "regex-2024.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:01c2acb51f8a7d6494c8c5eafe3d8e06d76563d8a8a4643b37e9b2dd8a2ff623"},
767 | {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2cce2449e5927a0bf084d346da6cd5eb016b2beca10d0013ab50e3c226ffc0df"},
768 | {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b37fa423beefa44919e009745ccbf353d8c981516e807995b2bd11c2c77d268"},
769 | {file = "regex-2024.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64ce2799bd75039b480cc0360907c4fb2f50022f030bf9e7a8705b636e408fad"},
770 | {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4cc92bb6db56ab0c1cbd17294e14f5e9224f0cc6521167ef388332604e92679"},
771 | {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d05ac6fa06959c4172eccd99a222e1fbf17b5670c4d596cb1e5cde99600674c4"},
772 | {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:040562757795eeea356394a7fb13076ad4f99d3c62ab0f8bdfb21f99a1f85664"},
773 | {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6113c008a7780792efc80f9dfe10ba0cd043cbf8dc9a76ef757850f51b4edc50"},
774 | {file = "regex-2024.9.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e5fb5f77c8745a60105403a774fe2c1759b71d3e7b4ca237a5e67ad066c7199"},
775 | {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:54d9ff35d4515debf14bc27f1e3b38bfc453eff3220f5bce159642fa762fe5d4"},
776 | {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:df5cbb1fbc74a8305b6065d4ade43b993be03dbe0f8b30032cced0d7740994bd"},
777 | {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7fb89ee5d106e4a7a51bce305ac4efb981536301895f7bdcf93ec92ae0d91c7f"},
778 | {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a738b937d512b30bf75995c0159c0ddf9eec0775c9d72ac0202076c72f24aa96"},
779 | {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e28f9faeb14b6f23ac55bfbbfd3643f5c7c18ede093977f1df249f73fd22c7b1"},
780 | {file = "regex-2024.9.11-cp311-cp311-win32.whl", hash = "sha256:18e707ce6c92d7282dfce370cd205098384b8ee21544e7cb29b8aab955b66fa9"},
781 | {file = "regex-2024.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:313ea15e5ff2a8cbbad96ccef6be638393041b0a7863183c2d31e0c6116688cf"},
782 | {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b0d0a6c64fcc4ef9c69bd5b3b3626cc3776520a1637d8abaa62b9edc147a58f7"},
783 | {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:49b0e06786ea663f933f3710a51e9385ce0cba0ea56b67107fd841a55d56a231"},
784 | {file = "regex-2024.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5b513b6997a0b2f10e4fd3a1313568e373926e8c252bd76c960f96fd039cd28d"},
785 | {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee439691d8c23e76f9802c42a95cfeebf9d47cf4ffd06f18489122dbb0a7ad64"},
786 | {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8f877c89719d759e52783f7fe6e1c67121076b87b40542966c02de5503ace42"},
787 | {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23b30c62d0f16827f2ae9f2bb87619bc4fba2044911e2e6c2eb1af0161cdb766"},
788 | {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ab7824093d8f10d44330fe1e6493f756f252d145323dd17ab6b48733ff6c0a"},
789 | {file = "regex-2024.9.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8dee5b4810a89447151999428fe096977346cf2f29f4d5e29609d2e19e0199c9"},
790 | {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98eeee2f2e63edae2181c886d7911ce502e1292794f4c5ee71e60e23e8d26b5d"},
791 | {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:57fdd2e0b2694ce6fc2e5ccf189789c3e2962916fb38779d3e3521ff8fe7a822"},
792 | {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d552c78411f60b1fdaafd117a1fca2f02e562e309223b9d44b7de8be451ec5e0"},
793 | {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a0b2b80321c2ed3fcf0385ec9e51a12253c50f146fddb2abbb10f033fe3d049a"},
794 | {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:18406efb2f5a0e57e3a5881cd9354c1512d3bb4f5c45d96d110a66114d84d23a"},
795 | {file = "regex-2024.9.11-cp312-cp312-win32.whl", hash = "sha256:e464b467f1588e2c42d26814231edecbcfe77f5ac414d92cbf4e7b55b2c2a776"},
796 | {file = "regex-2024.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:9e8719792ca63c6b8340380352c24dcb8cd7ec49dae36e963742a275dfae6009"},
797 | {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c157bb447303070f256e084668b702073db99bbb61d44f85d811025fcf38f784"},
798 | {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4db21ece84dfeefc5d8a3863f101995de646c6cb0536952c321a2650aa202c36"},
799 | {file = "regex-2024.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:220e92a30b426daf23bb67a7962900ed4613589bab80382be09b48896d211e92"},
800 | {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1ae19e64c14c7ec1995f40bd932448713d3c73509e82d8cd7744dc00e29e86"},
801 | {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f47cd43a5bfa48f86925fe26fbdd0a488ff15b62468abb5d2a1e092a4fb10e85"},
802 | {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d4a76b96f398697fe01117093613166e6aa8195d63f1b4ec3f21ab637632963"},
803 | {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ea51dcc0835eea2ea31d66456210a4e01a076d820e9039b04ae8d17ac11dee6"},
804 | {file = "regex-2024.9.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7aaa315101c6567a9a45d2839322c51c8d6e81f67683d529512f5bcfb99c802"},
805 | {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c57d08ad67aba97af57a7263c2d9006d5c404d721c5f7542f077f109ec2a4a29"},
806 | {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8404bf61298bb6f8224bb9176c1424548ee1181130818fcd2cbffddc768bed8"},
807 | {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dd4490a33eb909ef5078ab20f5f000087afa2a4daa27b4c072ccb3cb3050ad84"},
808 | {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:eee9130eaad130649fd73e5cd92f60e55708952260ede70da64de420cdcad554"},
809 | {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a2644a93da36c784e546de579ec1806bfd2763ef47babc1b03d765fe560c9f8"},
810 | {file = "regex-2024.9.11-cp313-cp313-win32.whl", hash = "sha256:e997fd30430c57138adc06bba4c7c2968fb13d101e57dd5bb9355bf8ce3fa7e8"},
811 | {file = "regex-2024.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:042c55879cfeb21a8adacc84ea347721d3d83a159da6acdf1116859e2427c43f"},
812 | {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:35f4a6f96aa6cb3f2f7247027b07b15a374f0d5b912c0001418d1d55024d5cb4"},
813 | {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:55b96e7ce3a69a8449a66984c268062fbaa0d8ae437b285428e12797baefce7e"},
814 | {file = "regex-2024.9.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb130fccd1a37ed894824b8c046321540263013da72745d755f2d35114b81a60"},
815 | {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:323c1f04be6b2968944d730e5c2091c8c89767903ecaa135203eec4565ed2b2b"},
816 | {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be1c8ed48c4c4065ecb19d882a0ce1afe0745dfad8ce48c49586b90a55f02366"},
817 | {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5b029322e6e7b94fff16cd120ab35a253236a5f99a79fb04fda7ae71ca20ae8"},
818 | {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6fff13ef6b5f29221d6904aa816c34701462956aa72a77f1f151a8ec4f56aeb"},
819 | {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d4af3979376652010e400accc30404e6c16b7df574048ab1f581af82065e4"},
820 | {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:079400a8269544b955ffa9e31f186f01d96829110a3bf79dc338e9910f794fca"},
821 | {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f9268774428ec173654985ce55fc6caf4c6d11ade0f6f914d48ef4719eb05ebb"},
822 | {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:23f9985c8784e544d53fc2930fc1ac1a7319f5d5332d228437acc9f418f2f168"},
823 | {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2941333154baff9838e88aa71c1d84f4438189ecc6021a12c7573728b5838e"},
824 | {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e93f1c331ca8e86fe877a48ad64e77882c0c4da0097f2212873a69bbfea95d0c"},
825 | {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:846bc79ee753acf93aef4184c040d709940c9d001029ceb7b7a52747b80ed2dd"},
826 | {file = "regex-2024.9.11-cp38-cp38-win32.whl", hash = "sha256:c94bb0a9f1db10a1d16c00880bdebd5f9faf267273b8f5bd1878126e0fbde771"},
827 | {file = "regex-2024.9.11-cp38-cp38-win_amd64.whl", hash = "sha256:2b08fce89fbd45664d3df6ad93e554b6c16933ffa9d55cb7e01182baaf971508"},
828 | {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:07f45f287469039ffc2c53caf6803cd506eb5f5f637f1d4acb37a738f71dd066"},
829 | {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4838e24ee015101d9f901988001038f7f0d90dc0c3b115541a1365fb439add62"},
830 | {file = "regex-2024.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6edd623bae6a737f10ce853ea076f56f507fd7726bee96a41ee3d68d347e4d16"},
831 | {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c69ada171c2d0e97a4b5aa78fbb835e0ffbb6b13fc5da968c09811346564f0d3"},
832 | {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02087ea0a03b4af1ed6ebab2c54d7118127fee8d71b26398e8e4b05b78963199"},
833 | {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69dee6a020693d12a3cf892aba4808fe168d2a4cef368eb9bf74f5398bfd4ee8"},
834 | {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297f54910247508e6e5cae669f2bc308985c60540a4edd1c77203ef19bfa63ca"},
835 | {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecea58b43a67b1b79805f1a0255730edaf5191ecef84dbc4cc85eb30bc8b63b9"},
836 | {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eab4bb380f15e189d1313195b062a6aa908f5bd687a0ceccd47c8211e9cf0d4a"},
837 | {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0cbff728659ce4bbf4c30b2a1be040faafaa9eca6ecde40aaff86f7889f4ab39"},
838 | {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:54c4a097b8bc5bb0dfc83ae498061d53ad7b5762e00f4adaa23bee22b012e6ba"},
839 | {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:73d6d2f64f4d894c96626a75578b0bf7d9e56dcda8c3d037a2118fdfe9b1c664"},
840 | {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:e53b5fbab5d675aec9f0c501274c467c0f9a5d23696cfc94247e1fb56501ed89"},
841 | {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ffbcf9221e04502fc35e54d1ce9567541979c3fdfb93d2c554f0ca583a19b35"},
842 | {file = "regex-2024.9.11-cp39-cp39-win32.whl", hash = "sha256:e4c22e1ac1f1ec1e09f72e6c44d8f2244173db7eb9629cc3a346a8d7ccc31142"},
843 | {file = "regex-2024.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:faa3c142464efec496967359ca99696c896c591c56c53506bac1ad465f66e919"},
844 | {file = "regex-2024.9.11.tar.gz", hash = "sha256:6c188c307e8433bcb63dc1915022deb553b4203a70722fc542c363bf120a01fd"},
845 | ]
846 | ruff = [
847 | {file = "ruff-0.7.0-py3-none-linux_armv6l.whl", hash = "sha256:0cdf20c2b6ff98e37df47b2b0bd3a34aaa155f59a11182c1303cce79be715628"},
848 | {file = "ruff-0.7.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:496494d350c7fdeb36ca4ef1c9f21d80d182423718782222c29b3e72b3512737"},
849 | {file = "ruff-0.7.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:214b88498684e20b6b2b8852c01d50f0651f3cc6118dfa113b4def9f14faaf06"},
850 | {file = "ruff-0.7.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630fce3fefe9844e91ea5bbf7ceadab4f9981f42b704fae011bb8efcaf5d84be"},
851 | {file = "ruff-0.7.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:211d877674e9373d4bb0f1c80f97a0201c61bcd1e9d045b6e9726adc42c156aa"},
852 | {file = "ruff-0.7.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:194d6c46c98c73949a106425ed40a576f52291c12bc21399eb8f13a0f7073495"},
853 | {file = "ruff-0.7.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:82c2579b82b9973a110fab281860403b397c08c403de92de19568f32f7178598"},
854 | {file = "ruff-0.7.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9af971fe85dcd5eaed8f585ddbc6bdbe8c217fb8fcf510ea6bca5bdfff56040e"},
855 | {file = "ruff-0.7.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b641c7f16939b7d24b7bfc0be4102c56562a18281f84f635604e8a6989948914"},
856 | {file = "ruff-0.7.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d71672336e46b34e0c90a790afeac8a31954fd42872c1f6adaea1dff76fd44f9"},
857 | {file = "ruff-0.7.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ab7d98c7eed355166f367597e513a6c82408df4181a937628dbec79abb2a1fe4"},
858 | {file = "ruff-0.7.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1eb54986f770f49edb14f71d33312d79e00e629a57387382200b1ef12d6a4ef9"},
859 | {file = "ruff-0.7.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:dc452ba6f2bb9cf8726a84aa877061a2462afe9ae0ea1d411c53d226661c601d"},
860 | {file = "ruff-0.7.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4b406c2dce5be9bad59f2de26139a86017a517e6bcd2688da515481c05a2cb11"},
861 | {file = "ruff-0.7.0-py3-none-win32.whl", hash = "sha256:f6c968509f767776f524a8430426539587d5ec5c662f6addb6aa25bc2e8195ec"},
862 | {file = "ruff-0.7.0-py3-none-win_amd64.whl", hash = "sha256:ff4aabfbaaba880e85d394603b9e75d32b0693152e16fa659a3064a85df7fce2"},
863 | {file = "ruff-0.7.0-py3-none-win_arm64.whl", hash = "sha256:10842f69c245e78d6adec7e1db0a7d9ddc2fff0621d730e61657b64fa36f207e"},
864 | {file = "ruff-0.7.0.tar.gz", hash = "sha256:47a86360cf62d9cd53ebfb0b5eb0e882193fc191c6d717e8bef4462bc3b9ea2b"},
865 | ]
866 | smmap = [
867 | {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"},
868 | {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"},
869 | ]
870 | sqlfluff = [
871 | {file = "sqlfluff-3.2.4-py3-none-any.whl", hash = "sha256:a03a152e01824b2b636235fb2703270367bd6695d1644345a29f648a44f6319b"},
872 | {file = "sqlfluff-3.2.4.tar.gz", hash = "sha256:a027ded8bea1f10a4de6173e3f02363cba37ab9e344432292553549a24028931"},
873 | ]
874 | sqlparse = [
875 | {file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"},
876 | {file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"},
877 | ]
878 | tblib = [
879 | {file = "tblib-3.0.0-py3-none-any.whl", hash = "sha256:80a6c77e59b55e83911e1e607c649836a69c103963c5f28a46cbeef44acf8129"},
880 | {file = "tblib-3.0.0.tar.gz", hash = "sha256:93622790a0a29e04f0346458face1e144dc4d32f493714c6c3dff82a4adb77e6"},
881 | ]
882 | toml = [
883 | {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
884 | {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
885 | ]
886 | tomli = [
887 | {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"},
888 | {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"},
889 | ]
890 | tqdm = [
891 | {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"},
892 | {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"},
893 | ]
894 | types-pyyaml = [
895 | {file = "types-PyYAML-6.0.12.20240917.tar.gz", hash = "sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587"},
896 | {file = "types_PyYAML-6.0.12.20240917-py3-none-any.whl", hash = "sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570"},
897 | ]
898 | typing-extensions = [
899 | {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
900 | {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
901 | ]
902 | tzdata = [
903 | {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"},
904 | {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"},
905 | ]
906 |
--------------------------------------------------------------------------------
/pre_build.py:
--------------------------------------------------------------------------------
import subprocess


SQUAWK_VERSION = "1.1.2"

# (asset name on the squawk release page, file name bundled into the wheel)
SQUAWK_BINARIES = [
    ("squawk-darwin-arm64", "squawk-darwin-arm64"),
    ("squawk-linux-x64", "squawk-linux-x86"),
]


def squawk_download_url(version: str, asset: str) -> str:
    """Return the GitHub release download URL for a squawk binary asset."""
    return (
        "https://github.com/sbdchd/squawk/releases/download/"
        f"v{version}/{asset}"
    )


if __name__ == "__main__":
    # check=True: a failed mkdir/curl/chmod must abort the build instead of
    # silently packaging missing or truncated binaries (the original calls
    # ignored the exit status entirely).
    subprocess.run(["mkdir", "-p", "migration_lint/bin"], check=True)
    for remote_name, local_name in SQUAWK_BINARIES:
        target = f"migration_lint/bin/{local_name}"
        # --fail: without it curl exits 0 on an HTTP 404 and saves the HTML
        # error page as the "binary".
        subprocess.run(
            [
                "curl",
                "-L",
                "--fail",
                squawk_download_url(SQUAWK_VERSION, remote_name),
                "-o",
                target,
            ],
            check=True,
        )
        subprocess.run(["chmod", "+x", target], check=True)
29 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "migration-lint"
3 | version = "0.2.12"
4 | description = "Tool for lint operations in DB migrations SQL"
5 | authors = ["Alexey Nikitenko "]
6 | readme = "README.md"
7 | packages = [{include = "migration_lint"}]
8 | include = ["migration_lint/bin/*"]
9 |
10 | [tool.poetry.scripts]
11 | migration-lint = 'migration_lint.main:main'
12 |
13 |
14 | [tool.poetry.dependencies]
15 | python = "^3.9"
16 |
17 | sqlfluff = "^3.2.4"
18 | click = "^8.0.3"
19 |
20 | gitpython = { version = "^3.1.43", optional = true }
21 | django = {version = ">=3.2", optional = true }
22 |
23 | [tool.poetry.group.dev]
24 | optional = true
25 |
26 | [tool.poetry.group.dev.dependencies]
27 | mypy = ">=0.990"
28 | ruff = ">=0.2"
29 | django-stubs = "^5.1.0"
30 |
31 | [tool.poetry.group.test]
32 | optional = true
33 |
34 | [tool.poetry.group.test.dependencies]
35 | pytest = ">=8.0,<9.0"
36 | pytest-cov = ">=4.1,<5.0"
37 |
38 | [tool.poetry.extras]
39 | git = ["gitpython"]
40 | django = ["django"]
41 |
42 | [tool.coverage.run]
43 | branch = true
44 |
45 | [tool.coverage.report]
46 | exclude_lines = [
47 | # Have to re-enable the standard pragma
48 | "pragma: no cover",
49 | # Don't complain about missing debug-only code:
50 | "def __repr__",
51 | "if self.debug",
52 | # Don't complain about some magic methods:
53 | "def __str__",
54 | # Don't complain if tests don't hit defensive assertion code:
55 | "raise AssertionError",
56 | "raise NotImplementedError",
57 | # Don't complain if non-runnable code isn't run:
58 | "if 0:",
59 | "if __name__ == .__main__.:",
60 | "if TYPE_CHECKING:",
    # Don't complain about empty implementations
62 | "pass",
63 | ]
64 | ignore_errors = true
65 |
66 | [tool.pytest.ini_options]
67 | addopts = [
68 | "--cov=migration_lint",
69 | "--cov-report=html",
70 | "--cov-report=term",
71 | "--cov-report=xml",
72 | "--junitxml=junit.xml",
73 | "-vv",
74 | ]
75 |
76 | [tool.poetry.build]
77 | generate-setup-file = false
78 | script="pre_build.py"
79 |
80 |
81 | [build-system]
82 | requires = ["poetry-core"]
83 | build-backend = "poetry.core.masonry.api"
84 |
--------------------------------------------------------------------------------
/style.rb:
--------------------------------------------------------------------------------
1 | all
2 |
3 | rule 'MD013', :line_length => 80, :tables => false
4 | exclude_rule 'MD033'
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PandaDoc/migration-lint/595a5026b3857b9213af2c0259e019525d6a38e0/tests/__init__.py
--------------------------------------------------------------------------------
/tests/test_alembic_extractor.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 | from unittest import mock
3 |
4 | import pytest
5 |
6 | from migration_lint.extractor.alembic import AlembicExtractor, os
7 | from migration_lint.source_loader.model import SourceDiff
8 |
9 |
def test_alembic_extractor__ok():
    """Migrations are parsed from sqlmigrate output and files are classified."""
    diffs = [
        SourceDiff(path="src/db/migrations/versions/202202151945_fbea801d4464_auto.py"),
        SourceDiff(path="src/tables.py"),
        SourceDiff(path="src/services.py"),
    ]
    extractor = AlembicExtractor()

    fake_output = (
        "command\n"
        "CREATE TABLE t;\n"
        "UPDATE alembic_version SET version_num='000000000000'\n"
        "-- Running upgrade -> fbea801d4464\n"
        "ALTER TABLE t DROP COLUMN c;\n"
        "INSERT INTO alembic_version (version_num) VALUES (fbea801d4464) RETURNING alembic_version.version_num\n"
        "UPDATE alembic_version SET version_num='fbea801d4464'"
    ).encode("utf-8")
    with mock.patch(
        "migration_lint.extractor.alembic.subprocess.check_output",
        return_value=fake_output,
    ):
        metadata = extractor.create_metadata(diffs)

    assert len(metadata.migrations) == 1
    assert metadata.migrations[0].raw_sql == "ALTER TABLE t DROP COLUMN c;"
    flags = [f.allowed_with_backward_incompatible for f in metadata.changed_files]
    assert flags == [True, True, False]
37 |
38 |
@pytest.mark.parametrize(
    "command,expected_command",
    [
        (None, "make sqlmigrate"),
        ("alembic upgrade head --sql", "alembic upgrade head --sql"),
    ],
)
def test_alembic_extractor_command__ok(command, expected_command):
    """The configured (or default) sqlmigrate command is what gets executed."""
    extractor = AlembicExtractor(alembic_command=command)
    diffs = [
        SourceDiff(path="src/db/migrations/versions/202202151945_fbea801d4464_auto.py"),
    ]

    sql_output = (
        "-- Running upgrade fbea801d4465 -> fbea801d4464\n"
        "CREATE TABLE t (id serial)"
    ).encode("utf-8")
    with mock.patch(
        "migration_lint.extractor.alembic.subprocess.check_output",
        return_value=sql_output,
    ) as subprocess_mock:
        extractor.create_metadata(diffs)

    subprocess_mock.assert_called_once_with(expected_command.split())
60 |
61 |
def test_alembic_extractor_path__ok():
    """A custom migrations path can be supplied via environment variable."""
    diffs = [
        SourceDiff(path="src/db/random/path/202202151945_fbea801d4464_auto.py"),
    ]

    custom_env = {"MIGRATION_LINT_ALEMBIC_MIGRATIONS_PATH": "/random/path"}
    sql_output = (
        "-- Running upgrade fbea801d4465 -> fbea801d4464\n"
        "CREATE TABLE t (id serial)"
    ).encode("utf-8")
    with mock.patch.dict(os.environ, custom_env, clear=True):
        with mock.patch(
            "migration_lint.extractor.alembic.subprocess.check_output",
            return_value=sql_output,
        ):
            # Construct inside the patched environment so the custom path is read.
            extractor = AlembicExtractor()
            metadata = extractor.create_metadata(diffs)

    assert len(metadata.migrations) == 1
    assert metadata.changed_files[0].allowed_with_backward_incompatible is True
82 |
83 |
def test_alembic_extractor__error():
    """A failing sqlmigrate command propagates CalledProcessError."""
    extractor = AlembicExtractor()
    diffs = [
        SourceDiff(path="src/db/migrations/versions/202202151945_fbea801d4464_auto.py"),
        SourceDiff(path="src/tables.py"),
        SourceDiff(path="src/services.py"),
    ]

    failure = subprocess.CalledProcessError(returncode=1, cmd="make sqlmigrate")
    with mock.patch(
        "migration_lint.extractor.alembic.subprocess.check_output",
        side_effect=failure,
    ), pytest.raises(subprocess.CalledProcessError):
        extractor.create_metadata(diffs)
98 |
--------------------------------------------------------------------------------
/tests/test_analyzer.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from typing import List
4 | from unittest import mock
5 |
6 | import pytest
7 |
8 | from migration_lint.analyzer import Analyzer, CompatibilityLinter
9 | from migration_lint.extractor.model import (
10 | ExtendedSourceDiff,
11 | Migration,
12 | MigrationsMetadata,
13 | )
14 | from migration_lint.sql.constants import StatementType
15 |
16 | FAKE_STATEMENT = "fake sql"
17 |
18 |
def get_analyzer(
    changed_files: List[ExtendedSourceDiff],
    migrations: List[Migration],
):
    """Build an Analyzer whose mocked extractor yields the given metadata."""
    metadata = MigrationsMetadata(
        migrations=migrations,
        changed_files=changed_files,
    )
    analyzer = Analyzer(
        loader=mock.MagicMock(),
        extractor=mock.MagicMock(),
        linters=[CompatibilityLinter()],
    )
    analyzer.extractor.create_metadata.return_value = metadata
    return analyzer
33 |
34 |
def test_analyze_no_errors():
    """A fully backward-compatible migration logs the success message."""
    analyzer = get_analyzer(
        changed_files=[ExtendedSourceDiff("one")],
        migrations=[Migration("one", "")],
    )
    with mock.patch(
        "migration_lint.analyzer.compat.classify_migration"
    ) as classify_mock:
        with mock.patch("migration_lint.analyzer.base.logger.info") as logger_mock:
            classify_mock.return_value = [
                (FAKE_STATEMENT, StatementType.BACKWARD_COMPATIBLE)
            ]
            analyzer.analyze()

    logger_mock.assert_called_with("\x1b[1;32mEverything seems good!\x1b[0m")
51 |
52 |
def test_analyze_ignore_migration():
    """A migration carrying the ignore marker comment is skipped."""
    ignored_sql = (
        "CREATE INDEX idx ON table_name (column_name); -- migration-lint: ignore"
    )
    analyzer = get_analyzer(
        changed_files=[ExtendedSourceDiff("one")],
        migrations=[Migration("one", ignored_sql)],
    )
    with mock.patch("migration_lint.analyzer.base.logger.info") as logger_mock:
        analyzer.analyze()

    logger_mock.assert_any_call("\x1b[33;21mMigration is ignored.\x1b[0m")
67 |
68 |
def test_analyze_no_migrations():
    """When the extractor finds no migrations, the analyzer just says so."""
    analyzer = get_analyzer(
        changed_files=[ExtendedSourceDiff("one")],
        migrations=[],
    )
    with mock.patch("migration_lint.analyzer.compat.classify_migration") as classify:
        with mock.patch("migration_lint.analyzer.base.logger.info") as info_log:
            classify.return_value = []
            analyzer.analyze()

    info_log.assert_called_with("Looks like you don't have any migration in MR.")
83 |
84 |
def test_analyze_unsupported():
    """An unclassifiable statement aborts the run (SystemExit) with an error."""
    analyzer = get_analyzer(
        changed_files=[ExtendedSourceDiff("one")],
        migrations=[Migration("one", "")],
    )
    with mock.patch("migration_lint.analyzer.compat.classify_migration") as classify:
        with mock.patch("migration_lint.analyzer.base.logger.error") as error_log:
            classify.return_value = [(FAKE_STATEMENT, StatementType.UNSUPPORTED)]
            with pytest.raises(SystemExit):
                analyzer.analyze()

    error_log.assert_called_with(
        f"- Statement can't be identified: {FAKE_STATEMENT}"
    )
102 |
103 |
def test_analyze_restricted():
    """Restricted statements abort the run and point at the squawk output."""
    analyzer = get_analyzer(
        changed_files=[ExtendedSourceDiff("one")],
        migrations=[Migration("one", "")],
    )
    with mock.patch("migration_lint.analyzer.compat.classify_migration") as classify:
        with mock.patch("migration_lint.analyzer.base.logger.error") as error_log:
            classify.return_value = [(FAKE_STATEMENT, StatementType.RESTRICTED)]
            with pytest.raises(SystemExit):
                analyzer.analyze()

    expected_message = (
        "- There are restricted statements in migration\n\t"
        "Check squawk output below for details\n\t"
        "Also check the doc to fix it: https://pandadoc.github.io/migration-lint/classification/\n"
    )
    error_log.assert_has_calls([mock.call(expected_message)])
127 |
128 |
def test_analyze_incompatible():
    """Backward-incompatible DDL fails when non-allowed files changed too."""
    analyzer = get_analyzer(
        changed_files=[
            ExtendedSourceDiff("one", "one", ""),
            ExtendedSourceDiff("two", "two", ""),
        ],
        migrations=[Migration("", "")],
    )
    with mock.patch("migration_lint.analyzer.compat.classify_migration") as classify:
        with mock.patch("migration_lint.analyzer.base.logger.error") as error_log:
            classify.return_value = [
                (FAKE_STATEMENT, StatementType.BACKWARD_INCOMPATIBLE)
            ]
            with pytest.raises(SystemExit):
                analyzer.analyze()

    expected_message = (
        "- You have backward incompatible operations, "
        "which is not allowed with changes in following files:"
        "\n\t- one\n\t- two"
        "\n\n\tPlease, separate changes in different merge requests.\n"
    )
    error_log.assert_has_calls([mock.call(expected_message)])
160 |
161 |
def test_analyze_incompatible_with_allowed_files():
    """Backward-incompatible DDL passes when every changed file is allowed."""
    allowed_file = ExtendedSourceDiff(
        "one", "one", "", allowed_with_backward_incompatible=True
    )
    analyzer = get_analyzer(
        changed_files=[allowed_file],
        migrations=[Migration("one", "")],
    )
    with mock.patch("migration_lint.analyzer.compat.classify_migration") as classify:
        with mock.patch("migration_lint.analyzer.base.logger.info") as info_log:
            classify.return_value = [
                (FAKE_STATEMENT, StatementType.BACKWARD_INCOMPATIBLE)
            ]
            analyzer.analyze()

    info_log.assert_called_with("\x1b[1;32mEverything seems good!\x1b[0m")
182 |
183 |
def test_analyze_data_migration():
    """Mixing a data migration with schema changes aborts the run."""
    analyzer = get_analyzer(
        changed_files=[
            ExtendedSourceDiff(
                "one", "one", "", allowed_with_backward_incompatible=True
            ),
            ExtendedSourceDiff(
                "two", "two", "", allowed_with_backward_incompatible=True
            ),
        ],
        migrations=[Migration("one", "")],
    )
    with mock.patch("migration_lint.analyzer.compat.classify_migration") as classify:
        with mock.patch("migration_lint.analyzer.base.logger.error") as error_log:
            classify.return_value = [
                (FAKE_STATEMENT, StatementType.DATA_MIGRATION),
                (FAKE_STATEMENT, StatementType.BACKWARD_INCOMPATIBLE),
            ]
            with pytest.raises(SystemExit):
                analyzer.analyze()

    expected_message = (
        f"- Seems like you have data migration along with schema migration: {FAKE_STATEMENT}"
        "\n\n\tPlease, separate changes in different merge requests.\n"
    )
    error_log.assert_has_calls([mock.call(expected_message)])
218 |
--------------------------------------------------------------------------------
/tests/test_classify_statement.py:
--------------------------------------------------------------------------------
1 | import pytest as pytest
2 |
3 | from migration_lint.sql.constants import StatementType
4 | from migration_lint.sql.parser import classify_migration
5 |
6 |
# One (SQL statement, expected classification) pair per case.
@pytest.mark.parametrize(
    "statement,expected_type",
    [
        (
            "CREATE INDEX CONCURRENTLY idx ON table_name (column_name);",
            StatementType.BACKWARD_COMPATIBLE,
        ),
        ("CREATE INDEX idx ON table_name (column_name);", StatementType.RESTRICTED),
        (
            "CREATE UNIQUE INDEX CONCURRENTLY idx ON table_name (column_name);",
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            "CREATE UNIQUE INDEX idx ON table_name (column_name);",
            StatementType.RESTRICTED,
        ),
        ("DROP INDEX idx;", StatementType.RESTRICTED),
        ("DROP INDEX CONCURRENTLY idx;", StatementType.BACKWARD_COMPATIBLE),
        ("REINDEX INDEX CONCURRENTLY idx", StatementType.BACKWARD_COMPATIBLE),
        ("REINDEX INDEX idx", StatementType.RESTRICTED),
        ("ALTER INDEX idx RENAME TO new_name;", StatementType.BACKWARD_COMPATIBLE),
        ("CREATE SEQUENCE name;", StatementType.BACKWARD_COMPATIBLE),
        ("ALTER SEQUENCE name START 0;", StatementType.BACKWARD_COMPATIBLE),
        ("ALTER TABLE t_name RENAME TO new_name;", StatementType.RESTRICTED),
        (
            "ALTER TABLE t_name ADD COLUMN c_name text NULL;",
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            "ALTER TABLE t_name ADD COLUMN c_name text NOT NULL;",
            StatementType.RESTRICTED,
        ),
        (
            "ALTER TABLE t_name ADD COLUMN c_name integer NOT NULL DEFAULT 0;",
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            "ALTER TABLE t_name ADD COLUMN c_name integer NULL DEFAULT 0;",
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            "ALTER TABLE t_name ADD COLUMN c_name bigserial PRIMARY KEY;",
            StatementType.BACKWARD_INCOMPATIBLE,
        ),
        (
            "ALTER TABLE t_name ADD COLUMN c_name UUID PRIMARY KEY;",
            StatementType.BACKWARD_INCOMPATIBLE,
        ),
        (
            "ALTER TABLE t_name ALTER COLUMN c_name SET NOT NULL;",
            StatementType.BACKWARD_INCOMPATIBLE,
        ),
        (
            "ALTER TABLE t_name ALTER COLUMN c_name DROP NOT NULL;",
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            "ALTER TABLE t_name ALTER COLUMN c_name SET DEFAULT 0;",
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            "ALTER TABLE t_name ALTER COLUMN c_name DROP DEFAULT;",
            StatementType.BACKWARD_INCOMPATIBLE,
        ),
        (
            "ALTER TABLE t_name DROP CONSTRAINT c_name;",
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            "ALTER TABLE t_name ADD CONSTRAINT name FOREIGN KEY (c_name) REFERENCES some_table (id);",
            StatementType.RESTRICTED,
        ),
        (
            "ALTER TABLE t_name ADD CONSTRAINT name FOREIGN KEY (c_name) REFERENCES some_table (id) NOT VALID;",
            StatementType.BACKWARD_COMPATIBLE,
        ),
        ("UPDATE t_name SET col=0", StatementType.DATA_MIGRATION),
        ("DELETE FROM t_name WHERE col=0", StatementType.DATA_MIGRATION),
        (
            "INSERT INTO t_name (id, name) VALUES (1, 'foo')",
            StatementType.DATA_MIGRATION,
        ),
        (
            "ALTER TABLE t_name VALIDATE CONSTRAINT c_name;",
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            "ALTER TABLE t_name ALTER COLUMN c_name TYPE text;",
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            "ALTER TABLE t_name ALTER COLUMN c_name TYPE varchar(10);",
            StatementType.RESTRICTED,
        ),
        (
            "ALTER TABLE t_name ADD CONSTRAINT c_name UNIQUE (col);",
            StatementType.RESTRICTED,
        ),
        (
            "ALTER TABLE t_name ADD CONSTRAINT c_name UNIQUE USING INDEX i_name;",
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            "ALTER TABLE t_name ADD CONSTRAINT c_name PRIMARY KEY USING INDEX i_name",
            StatementType.BACKWARD_INCOMPATIBLE,
        ),
        (
            "ALTER TABLE t_name RENAME COLUMN c_name TO another_name",
            StatementType.BACKWARD_INCOMPATIBLE,
        ),
        (
            """
            CREATE TRIGGER tr_name
            BEFORE INSERT ON t_name
            FOR EACH ROW EXECUTE FUNCTION f_name()
            """,
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            "DROP TRIGGER t_name ON tbl_name",
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            """
            CREATE OR REPLACE FUNCTION f_name() RETURNS TRIGGER AS $$
            BEGIN
                NEW."new_id" := NEW."id";
                RETURN NEW;
            END $$ LANGUAGE plpgsql
            """,
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            "DROP FUNCTION t_name",
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            "ALTER TABLE t_name ADD COLUMN id BIGINT GENERATED BY DEFAULT AS IDENTITY",
            StatementType.RESTRICTED,
        ),
        (
            "ALTER TABLE t_name ALTER COLUMN id ADD GENERATED BY DEFAULT AS IDENTITY",
            StatementType.BACKWARD_COMPATIBLE,
        ),
    ],
)
def test_classify_migration(statement: str, expected_type: StatementType):
    """Each single SQL statement is classified with the expected safety type."""
    result = classify_migration(statement)
    # classify_migration returns one (statement, type) pair per SQL statement
    assert len(result) == 1
    assert result[0][1] == expected_type
157 |
158 |
def test_ignore_order():
    """NOT NULL + DEFAULT is safe in either clause order; bare NOT NULL is not."""
    safe_variants = (
        "ALTER TABLE t_name ADD COLUMN c_name integer NOT NULL DEFAULT 0;",
        "ALTER TABLE t_name ADD COLUMN c_name integer DEFAULT 0 NOT NULL;",
    )
    for sql in safe_variants:
        classified = classify_migration(sql)
        assert len(classified) == 1
        assert classified[0][1] == StatementType.BACKWARD_COMPATIBLE

    classified = classify_migration(
        "ALTER TABLE t_name ADD COLUMN c_name integer NOT NULL;"
    )
    assert len(classified) == 1
    assert classified[0][1] == StatementType.RESTRICTED
177 |
178 |
# Each case is a two-statement script: the classification of the second
# statement depends on what the first one did (e.g. constraints added to a
# table created in the same migration are safe).
@pytest.mark.parametrize(
    "sql,expected_type",
    [
        (
            """
        CREATE TABLE t_name (id serial, c_name integer);
        ALTER TABLE t_name ADD CONSTRAINT c_name CHECK (col > 0);
        """,
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            """
        CREATE TABLE t_name (id serial, c_name integer);
        ALTER TABLE another_table ADD CONSTRAINT c_name CHECK (col > 0);
        """,
            StatementType.RESTRICTED,
        ),
        (
            """
        CREATE TABLE t_name (id serial, c_name integer);
        ALTER TABLE t_name ADD FOREIGN KEY (c_name) REFERENCES some_table (id);
        """,
            StatementType.BACKWARD_COMPATIBLE,
        ),
        (
            """
        ALTER TABLE t_name VALIDATE CONSTRAINT c_name;
        ALTER TABLE t_name ALTER COLUMN col SET NOT NULL;
        """,
            StatementType.BACKWARD_COMPATIBLE,
        ),
    ],
)
def test_conditionally_safe(sql: str, expected_type: StatementType):
    """Context from an earlier statement changes the later one's classification."""
    result = classify_migration(sql)
    assert len(result) == 2
    # check the last statement type
    assert result[-1][1] == expected_type
217 |
218 |
@pytest.mark.parametrize(
    "statement,expected_count",
    [
        ("SET statement_timeout=5;", 0),
        ("SET statement_timeout=5; DROP INDEX idx;", 1),
        ("BEGIN; ALTER TABLE t_name ADD COLUMN c_name text NULL; COMMIT;", 1),
    ],
)
def test_ignore_statements(statement: str, expected_count: int):
    """Service statements (SET/BEGIN/COMMIT) are excluded from classification."""
    classified = classify_migration(statement)
    assert len(classified) == expected_count
230 |
--------------------------------------------------------------------------------
/tests/test_django_extractor.py:
--------------------------------------------------------------------------------
1 | from unittest import mock
2 |
3 | from migration_lint.extractor.django import DjangoExtractor
4 | from migration_lint.source_loader.model import SourceDiff
5 |
6 |
def test_django_extractor__ok():
    """sqlmigrate output is parsed into SQL; migration-related files are flagged."""
    extractor = DjangoExtractor()
    changed_files = [
        SourceDiff(path=p)
        for p in (
            "documents/migrations/0001_initial.py",
            "documents/models.py",
            "documents/services.py",
        )
    ]

    # Noise lines before the SQL mimic real management-command output.
    fake_output = "command\nMonkeypatching..\nALTER TABLE t DROP COLUMN c;"
    with mock.patch(
        "migration_lint.extractor.django.subprocess.check_output"
    ) as subprocess_mock:
        subprocess_mock.return_value = fake_output.encode("utf-8")
        metadata = extractor.create_metadata(changed_files)

    assert len(metadata.migrations) == 1
    assert metadata.migrations[0].raw_sql == "ALTER TABLE t DROP COLUMN c;"
    flags = [f.allowed_with_backward_incompatible for f in metadata.changed_files]
    assert flags == [True, True, False]
28 |
--------------------------------------------------------------------------------
/tests/test_django_management_extractor.py:
--------------------------------------------------------------------------------
1 | from unittest import mock
2 |
3 | from migration_lint.django.extractor.django_management import DjangoManagementExtractor
4 | from migration_lint.source_loader.model import SourceDiff
5 |
6 |
def test_django_extractor__ok():
    """SQL comes from call_command; migration-related files are flagged."""
    extractor = DjangoManagementExtractor()
    changed_files = [
        SourceDiff(path=p)
        for p in (
            "documents/migrations/0001_initial.py",
            "documents/models.py",
            "documents/services.py",
        )
    ]

    apps_patch = mock.patch(
        "migration_lint.django.extractor.django_management.django.apps.apps.get_app_configs"
    )
    command_patch = mock.patch(
        "migration_lint.django.extractor.django_management.call_command"
    )
    with apps_patch as get_app_configs_mock, command_patch as call_command_mock:
        get_app_configs_mock.return_value = []
        call_command_mock.return_value = "ALTER TABLE t DROP COLUMN c;"
        metadata = extractor.create_metadata(changed_files)

    assert len(metadata.migrations) == 1
    assert metadata.migrations[0].raw_sql == "ALTER TABLE t DROP COLUMN c;"
    flags = [f.allowed_with_backward_incompatible for f in metadata.changed_files]
    assert flags == [True, True, False]
30 |
--------------------------------------------------------------------------------
/tests/test_flyway_extractor.py:
--------------------------------------------------------------------------------
1 | from unittest import mock
2 |
3 | from migration_lint.extractor.flyway import FlywayExtractor
4 | from migration_lint.source_loader.model import SourceDiff
5 |
6 |
def test_flyway_extractor__ok():
    """Migration SQL is read from disk; application sources are not flagged."""
    extractor = FlywayExtractor()
    changed_files = [
        SourceDiff(path="src/resources/db/migration/v1_some_migration.sql"),
        SourceDiff(path="src/main/Listeners.java"),
        SourceDiff(path="src/main/Listeners.kt"),
    ]

    with mock.patch(
        "builtins.open", mock.mock_open(read_data="ALTER TABLE t DROP COLUMN c;")
    ):
        metadata = extractor.create_metadata(changed_files)

    assert len(metadata.migrations) == 1
    assert metadata.migrations[0].raw_sql == "ALTER TABLE t DROP COLUMN c;"
    flags = [f.allowed_with_backward_incompatible for f in metadata.changed_files]
    assert flags == [True, False, False]
25 |
--------------------------------------------------------------------------------
/tests/test_gitlab_loader.py:
--------------------------------------------------------------------------------
1 | import json
2 | from unittest import mock
3 |
4 | import pytest
5 |
6 | from migration_lint.source_loader.gitlab import GitlabBranchLoader, GitlabMRLoader, os
7 |
8 |
def test_gitlab_branch_loader():
    """With only_new_files=True only newly created files are returned."""
    loader = GitlabBranchLoader(
        branch="branch_name",
        project_id="000",
        gitlab_api_key="key",
        gitlab_instance="https://gitlab.example.com",
        only_new_files=True,
    )

    diffs = [
        {
            "new_path": "a.py",
            "old_path": None,
            "diff": "",
            "deleted_file": False,
            "new_file": True,
        },
        {
            "new_path": None,
            "old_path": "c.py",
            "diff": "",
            "deleted_file": True,
            "new_file": False,
        },
        {
            "new_path": "d.py",
            "old_path": "d.py",
            "diff": "",
            "deleted_file": False,
            "new_file": False,
        },
    ]
    with mock.patch("migration_lint.source_loader.gitlab.urlopen") as urlopen_mock:
        # Single setup call here, so the loader's real request lands at index 1.
        urlopen_mock().__enter__().read.return_value = json.dumps(
            {"diffs": diffs}
        ).encode("utf-8")

        changed_files = loader.get_changed_files()

    assert len(changed_files) == 1
    assert changed_files[0].path == "a.py"
    request = urlopen_mock.call_args_list[1].args[0]
    assert (
        request.full_url
        == "https://gitlab.example.com/api/v4/projects/000/repository/compare?from=master&to=branch_name"
    )
55 |
56 |
def test_gitlab_branch_loader_on_changed_files():
    """With only_new_files=False both new and modified files are returned."""
    loader = GitlabBranchLoader(
        branch="branch_name",
        project_id="000",
        gitlab_api_key="key",
        gitlab_instance="https://gitlab.example.com",
        only_new_files=False,
    )

    diffs = [
        {
            "new_path": "a.py",
            "old_path": None,
            "diff": "",
            "deleted_file": False,
            "new_file": True,
        },
        {
            "new_path": None,
            "old_path": "c.py",
            "diff": "",
            "deleted_file": True,
            "new_file": False,
        },
        {
            "new_path": "d.py",
            "old_path": "d.py",
            "diff": "",
            "deleted_file": False,
            "new_file": False,
        },
    ]
    with mock.patch("migration_lint.source_loader.gitlab.urlopen") as urlopen_mock:
        # Single setup call here, so the loader's real request lands at index 1.
        urlopen_mock().__enter__().read.return_value = json.dumps(
            {"diffs": diffs}
        ).encode("utf-8")

        changed_files = loader.get_changed_files()

    # The deleted file is dropped; the new and the modified files remain.
    assert len(changed_files) == 2
    assert changed_files[0].path == "a.py"
    assert changed_files[1].path == "d.py"
    request = urlopen_mock.call_args_list[1].args[0]
    assert (
        request.full_url
        == "https://gitlab.example.com/api/v4/projects/000/repository/compare?from=master&to=branch_name"
    )
104 |
105 |
def test_gitlab_branch_loader_not_configured():
    """Constructing the loader without configuration raises RuntimeError."""
    missing_config = dict(
        branch=None,
        project_id=None,
        gitlab_api_key=None,
        gitlab_instance=None,
        only_new_files=True,
    )
    with pytest.raises(RuntimeError):
        GitlabBranchLoader(**missing_config)
115 |
116 |
@mock.patch.dict(os.environ, {"CI_DEFAULT_BRANCH": "dev"}, clear=True)
def test_gitlab_branch_loader_default_branch():
    """CI_DEFAULT_BRANCH overrides 'master' as the comparison base."""
    loader = GitlabBranchLoader(
        branch="branch_name",
        project_id="000",
        gitlab_api_key="key",
        gitlab_instance="https://gitlab.example.com",
        only_new_files=True,
    )

    diffs = [
        {
            "new_path": "a.py",
            "old_path": None,
            "diff": "",
            "deleted_file": False,
            "new_file": True,
        },
        {
            "new_path": None,
            "old_path": "c.py",
            "diff": "",
            "deleted_file": True,
            "new_file": False,
        },
        {
            "new_path": "d.py",
            "old_path": "d.py",
            "diff": "",
            "deleted_file": False,
            "new_file": False,
        },
    ]
    with mock.patch("migration_lint.source_loader.gitlab.urlopen") as urlopen_mock:
        # Single setup call here, so the loader's real request lands at index 1.
        urlopen_mock().__enter__().read.return_value = json.dumps(
            {"diffs": diffs}
        ).encode("utf-8")

        changed_files = loader.get_changed_files()

    assert len(changed_files) == 1
    assert changed_files[0].path == "a.py"
    request = urlopen_mock.call_args_list[1].args[0]
    assert (
        request.full_url
        == "https://gitlab.example.com/api/v4/projects/000/repository/compare?from=dev&to=branch_name"
    )
164 |
165 |
def test_gitlab_mr_loader():
    """MR loader fetches the MR, then its diffs; only new files are returned."""
    loader = GitlabMRLoader(
        mr_id="100",
        project_id="000",
        gitlab_api_key="key",
        gitlab_instance="https://gitlab.example.com",
        only_new_files=True,
    )

    diffs = [
        {
            "new_path": "a.py",
            "old_path": None,
            "diff": "",
            "deleted_file": False,
            "new_file": True,
        },
        {
            "new_path": None,
            "old_path": "c.py",
            "diff": "",
            "deleted_file": True,
            "new_file": False,
        },
        {
            "new_path": "d.py",
            "old_path": "d.py",
            "diff": "",
            "deleted_file": False,
            "new_file": False,
        },
    ]
    with mock.patch("migration_lint.source_loader.gitlab.urlopen") as urlopen_mock:
        # First response: MR info; second response: the diffs list.
        # Single setup call here, so the loader's requests land at indexes 1 and 2.
        urlopen_mock().__enter__().read.side_effect = [
            json.dumps({"web_url": "fake mr url"}).encode("utf-8"),
            json.dumps(diffs).encode("utf-8"),
        ]

        changed_files = loader.get_changed_files()

    assert len(changed_files) == 1
    assert changed_files[0].path == "a.py"
    mr_url = urlopen_mock.call_args_list[1].args[0].full_url
    diffs_url = urlopen_mock.call_args_list[2].args[0].full_url
    assert mr_url == "https://gitlab.example.com/api/v4/projects/000/merge_requests/100"
    assert (
        diffs_url
        == "https://gitlab.example.com/api/v4/projects/000/merge_requests/100/diffs"
    )
217 |
218 |
def test_gitlab_mr_loader_on_changed_files():
    """With only_new_files=False both new and modified files are returned."""
    loader = GitlabMRLoader(
        mr_id="100",
        project_id="000",
        gitlab_api_key="key",
        gitlab_instance="https://gitlab.example.com",
        only_new_files=False,
    )

    diffs = [
        {
            "new_path": "a.py",
            "old_path": None,
            "diff": "",
            "deleted_file": False,
            "new_file": True,
        },
        {
            "new_path": None,
            "old_path": "c.py",
            "diff": "",
            "deleted_file": True,
            "new_file": False,
        },
        {
            "new_path": "d.py",
            "old_path": "d.py",
            "diff": "",
            "deleted_file": False,
            "new_file": False,
        },
    ]
    with mock.patch("migration_lint.source_loader.gitlab.urlopen") as urlopen_mock:
        # First response: MR info; second response: the diffs list.
        # Single setup call here, so the loader's requests land at indexes 1 and 2.
        urlopen_mock().__enter__().read.side_effect = [
            json.dumps({"web_url": "fake mr url"}).encode("utf-8"),
            json.dumps(diffs).encode("utf-8"),
        ]

        changed_files = loader.get_changed_files()

    # The deleted file is dropped; the new and the modified files remain.
    assert len(changed_files) == 2
    assert changed_files[0].path == "a.py"
    assert changed_files[1].path == "d.py"
    mr_url = urlopen_mock.call_args_list[1].args[0].full_url
    diffs_url = urlopen_mock.call_args_list[2].args[0].full_url
    assert mr_url == "https://gitlab.example.com/api/v4/projects/000/merge_requests/100"
    assert (
        diffs_url
        == "https://gitlab.example.com/api/v4/projects/000/merge_requests/100/diffs"
    )
272 |
273 |
def test_gitlab_mr_loader_not_configured():
    """Constructing the MR loader without configuration raises RuntimeError."""
    missing_config = dict(
        mr_id=None,
        project_id=None,
        gitlab_api_key=None,
        gitlab_instance=None,
        only_new_files=True,
    )
    with pytest.raises(RuntimeError):
        GitlabMRLoader(**missing_config)
283 |
--------------------------------------------------------------------------------
/tests/test_main.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from unittest import mock
4 |
5 | import pytest
6 | from click.testing import CliRunner
7 |
8 | from migration_lint.extractor.django import DjangoExtractor
9 | from migration_lint.main import main
10 | from migration_lint.source_loader.local import LocalLoader
11 |
12 |
@pytest.mark.parametrize("squawk_config", [[], ["--squawk-config-path=.squawk.toml"]])
def test_main(squawk_config: list):
    """The CLI wires the selected loader and extractor into the Analyzer."""
    cli_args = ["--loader=local_git", "--extractor=django", *squawk_config]

    with mock.patch("migration_lint.main.Analyzer") as analyzer_mock:
        outcome = CliRunner().invoke(main, cli_args)

    assert outcome.exit_code == 0, outcome.stdout
    analyzer_kwargs = analyzer_mock.call_args.kwargs
    assert isinstance(analyzer_kwargs["loader"], LocalLoader)
    assert isinstance(analyzer_kwargs["extractor"], DjangoExtractor)
26 |
--------------------------------------------------------------------------------
/tests/test_squawk_linter.py:
--------------------------------------------------------------------------------
1 | from unittest.mock import patch
2 |
3 | import pytest
4 |
5 | from migration_lint.analyzer.squawk import SquawkLinter
6 |
7 | FAKE_STATEMENT = "fake sql"
8 |
9 |
@pytest.mark.parametrize(
    "platform, result",
    [("linux", "squawk-linux-x86"), ("darwin", "squawk-darwin-arm64")],
)
@patch("migration_lint.__path__", ["migration_lint_path"])
def test_platform(platform: str, result: str):
    """The squawk binary path is selected based on sys.platform."""
    with patch("sys.platform", platform):
        assert SquawkLinter().squawk == f"migration_lint_path/bin/{result}"
19 |
20 |
def test_unsupported_platform():
    """Constructing the linter on an unknown platform raises RuntimeError."""
    with patch("sys.platform", "win32"), pytest.raises(
        RuntimeError, match="unsupported platform: win32"
    ):
        SquawkLinter()
25 |
26 |
@pytest.mark.parametrize(
    "params, result_flags",
    [
        ({}, ""),
        ({"config_path": ".squawk.toml"}, "--config=.squawk.toml"),
        # NOTE(review): the leading space below yields a double space after
        # --exclude in the expected command; presumably this mirrors how
        # squawk_command joins its flags — confirm against SquawkLinter.
        ({"pg_version": "13.0"}, " --pg-version=13.0"),
        (
            {"config_path": ".squawk.toml", "pg_version": "13.0"},
            "--config=.squawk.toml --pg-version=13.0",
        ),
    ],
    ids=["Without params", "With config", "With pg version", "With all params"],
)
@patch("migration_lint.__path__", ["path"])
@patch("sys.platform", "linux")
def test_squawk_command(params: dict, result_flags: str):
    """squawk_command combines binary path, excluded rules, and extra flags."""
    ignored_rules = ["ignored-rule"]

    # Patch the class attribute so the linter picks up our excluded rule.
    with patch.object(
        SquawkLinter, "ignored_rules", new_callable=lambda: ignored_rules
    ):
        linter = SquawkLinter(**params)

    result = linter.squawk_command(FAKE_STATEMENT)

    # strip() removes the trailing space left when result_flags is empty.
    expected_result = f"path/bin/squawk-linux-x86 --exclude=ignored-rule {result_flags}"
    assert result == expected_result.strip()
54 |
--------------------------------------------------------------------------------