├── .github
├── dependabot.yml
└── workflows
│ ├── publish.yml
│ └── test.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .python-version
├── README.md
├── pyproject.toml
├── pytest_scrutinize
├── __init__.py
├── data.py
├── io.py
├── mocks.py
├── plugin.py
├── plugin_xdist.py
├── timer.py
└── utils.py
├── requirements-dev.lock
├── requirements.lock
└── tests
├── conftest.py
├── django_app
├── __init__.py
├── apps.py
├── models.py
└── settings.py
├── examples
├── test_django.py
├── test_mock.py
└── test_simple.py
└── test_plugin.py
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: pip
4 | directory: "/"
5 | schedule:
6 | interval: daily
7 | groups:
8 | python-packages:
9 | patterns:
10 | - "*"
11 |
12 | - package-ecosystem: github-actions
13 | directory: "/"
14 | schedule:
15 | interval: daily
16 | groups:
17 | github-actions:
18 | patterns:
19 | - "*"
20 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: Publish to PyPI 📦
2 |
3 | on:
4 | workflow_dispatch:
5 |
6 | release:
7 | types: [ created ]
8 |
9 | jobs:
10 | build-and-publish:
11 | name: Publish 🐍
12 | runs-on: ubuntu-latest
13 | environment:
14 | name: pypi
15 | url: https://pypi.org/p/pytest-scrutinize
16 | permissions:
17 | id-token: write
18 | contents: read
19 | steps:
20 | - uses: actions/checkout@v4
21 |
22 | - name: Set up Python
23 | uses: actions/setup-python@v5
24 | with:
25 | python-version: 3.11
26 |
27 | - name: Install pip and wheel
28 | run: python -m pip install --upgrade pip wheel
29 |
30 | - name: Setup Rye
31 | uses: eifinger/setup-rye@v4
32 |
33 | - name: Build
34 | run: rye build
35 |
36 | - name: Publish distribution 📦 to PyPI
37 | uses: pypa/gh-action-pypi-publish@release/v1
38 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Test 🚀
2 |
3 | on:
4 | push:
5 | pull_request:
6 |
7 | concurrency:
8 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
9 | cancel-in-progress: true
10 |
11 | jobs:
12 | pre-commit:
13 | runs-on: ubuntu-latest
14 | steps:
15 | - uses: actions/checkout@v4
16 | - uses: actions/setup-python@v5
17 | - uses: pre-commit/action@v3.0.1
18 |
19 | test:
20 | name: Test 🚀
21 | runs-on: ubuntu-latest
22 |
23 | strategy:
24 | fail-fast: false
25 | matrix:
26 | python: [ "3.9", "3.10", "3.11", "3.12" ]
27 | pytest: [ "6", "7", "8" ]
28 |
29 | steps:
30 | - uses: actions/checkout@v4
31 |
32 | - name: Set up Python
33 | uses: actions/setup-python@v5
34 | with:
35 | cache: pip
36 | cache-dependency-path: requirements-dev.lock
37 | python-version: ${{ matrix.python }}
38 |
39 | - name: Install pip and wheel
40 | run: python -m pip install --upgrade pip wheel
41 |
42 | - name: Setup Rye
43 | uses: eifinger/setup-rye@v4
44 | with:
45 | enable-cache: true
46 |
47 | - name: Install deps
48 | run: rye sync
49 |
50 | - name: Setup Pytest
51 | run: rye install --force "pytest~=${{ matrix.pytest }}.0"
52 |
53 | - name: Run Pytest
54 | run: rye test
55 |
56 | - name: Upload test timings
57 | uses: actions/upload-artifact@v4
58 | with:
59 | name: test-timings-${{ matrix.python }}-${{ matrix.pytest }}
60 | retention-days: 1
61 | path: |
62 | test-timings.jsonl.gz
63 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | test-timings.jsonl.gz
3 |
4 | # python generated files
5 | __pycache__/
6 | *.py[oc]
7 | build/
8 | dist/
9 | wheels/
10 | *.egg-info
11 |
12 | # venv
13 | .venv
14 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/astral-sh/ruff-pre-commit
3 | rev: v0.9.7
4 | hooks:
5 | # Run the linter.
6 | - id: ruff
7 | args: [ --fix ]
8 | # Run the formatter.
9 | - id: ruff-format
10 |
--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
1 | 3.12.3
2 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # pytest-scrutinize
2 |
3 |     
4 |
5 | Big test suites for large projects can be a pain to optimize. `pytest-scrutinize` helps you
6 | profile your test runs by exporting *detailed* timings as JSON for the following things:
7 |
8 | - Tests
9 | - [Fixture setup/teardowns](#fixture-setup-and-teardown)
10 | - [Django SQL queries](#django-sql-queries)
11 | - [pytest-xdist](https://pypi.org/project/pytest-xdist/) worker boot times
12 | - [Arbitrary functions](#record-additional-functions-)
13 | - [Garbage collections](#garbage-collection)
14 | - Pytest setup/collection times
15 |
16 | All data is associated with the currently executing test or fixture. As an example, you can
17 | use this to find all the Django SQL queries executed within a given fixture across your
18 | entire test suite.
19 |
20 | ## Installation:
21 |
22 | Install with pip [from PyPI](https://pypi.org/project/pytest-scrutinize/)
23 |
24 | ```
25 | pip install pytest-scrutinize
26 | ```
27 |
28 | ## Usage:
29 |
30 | Run your test suite with the `--scrutinize` flag, passing a file path to write to:
31 |
32 | ```
33 | pytest --scrutinize=test-timings.jsonl.gz
34 | ```
35 |
36 | ## Analysing the results
37 |
38 |
39 | A tool to help with analysing this data is not included yet, however it can be quickly explored
40 | with [DuckDB](https://duckdb.org/). For example, to find the top 10 fixtures by total duration
41 | along with the number of tests that were executed:
42 |
43 | ```sql
44 | select name,
45 | to_microseconds(sum(runtime.as_microseconds)::bigint) as duration,
46 | count(distinct test_id) as test_count
47 | from 'test-timings.jsonl.gz'
48 | where type = 'fixture'
49 | group by all
50 | order by duration desc
51 | limit 10;
52 | ```
53 |
54 | Or the tests with the highest number of duplicated SQL queries executed as part of it or
55 | any fixture it depends on:
56 |
57 | ```sql
58 | select test_id,
59 | sum(count) as duplicate_queries,
60 | count(distinct sql_hash) as unique_queries,
61 | FROM (SELECT test_id, fixture_name, sql_hash, COUNT(*) AS count
62 | from 'test-timings.jsonl.gz'
63 | where type = 'django-sql'
64 | GROUP BY all
65 | HAVING count > 1)
66 | group by all
67 | order by duplicate_queries desc limit 10;
68 | ```
69 |
70 | ## Data captured:
71 |
72 | The resulting file will contain newline-delimited JSON objects. The Pydantic models for these
73 | can be [found here](./pytest_scrutinize/data.py).
74 |
75 | All events captured contain a `meta` structure that contains the `xdist` worker (if any), the
76 | absolute time the timing was taken and the Python thread name that the timing was captured in.
77 |
78 |
79 | Meta example
80 |
81 | ```json
82 | {
83 | "meta": {
84 | "worker": "gw0",
85 | "recorded_at": "2024-08-17T22:02:44.956924Z",
86 | "thread_id": 3806124,
87 | "thread_name": "MainThread"
88 | }
89 | }
90 | ```
91 |
92 |
93 |
94 | All durations are expressed with the same structure, containing the duration in different formats:
95 | nanoseconds, microseconds, ISO 8601 and text
96 |
97 |
98 | Duration example
99 |
100 | ```json
101 | {
102 | "runtime": {
103 | "as_nanoseconds": 60708,
104 | "as_microseconds": 60,
105 | "as_iso": "PT0.00006S",
106 | "as_text": "60 microseconds"
107 | }
108 | }
109 | ```
110 |
111 |
112 |
113 | ### Fixture setup and teardown
114 |
115 | Pytest fixtures can be simple functions, or context managers that can clean up resources after a
116 | test has finished. `pytest-scrutinize` records both the setup _and_ teardown times for all fixtures,
117 | allowing you to precisely locate performance bottlenecks:
118 |
119 | ```python
120 | @pytest.fixture
121 | def slow_teardown():
122 | yield
123 | time.sleep(1)
124 | ```
125 |
126 |
127 | Example
128 |
129 | ```json
130 | {
131 | "meta": {
132 | "worker": "master",
133 | "recorded_at": "2024-08-17T21:23:54.736177Z",
134 | "thread_name": "MainThread"
135 | },
136 | "type": "fixture",
137 | "name": "pytest_django.plugin._django_set_urlconf",
138 | "short_name": "_django_set_urlconf",
139 | "test_id": "tests/test_plugin.py::test_all[normal]",
140 | "scope": "function",
141 | "setup": {
142 | "as_nanoseconds": 5792,
143 | "as_microseconds": 5,
144 | "as_iso": "PT0.000005S",
145 | "as_text": "5 microseconds"
146 | },
147 | "teardown": {
148 | "as_nanoseconds": 2167,
149 | "as_microseconds": 2,
150 | "as_iso": "PT0.000002S",
151 | "as_text": "2 microseconds"
152 | },
153 | "runtime": {
154 | "as_nanoseconds": 7959,
155 | "as_microseconds": 7,
156 | "as_iso": "PT0.000007S",
157 | "as_text": "7 microseconds"
158 | }
159 | }
160 | ```
161 |
162 |
163 |
164 | ### Django SQL queries
165 |
166 | Information on Django SQL queries can be captured with the `--scrutinize-django-sql` flag. By
167 | default, the hash of the SQL query is captured (allowing you to count duplicate queries), but
168 | the raw SQL can also be captured:
169 |
170 | ```shell
171 | # Log the hashes of the executed SQL queries
172 | pytest --scrutinize=test-timings.jsonl.gz --scrutinize-django-sql
173 | # Log raw SQL queries. Warning: May produce very large files!
174 | pytest --scrutinize=test-timings.jsonl.gz --scrutinize-django-sql=query
175 | ```
176 |
177 |
178 | Example
179 |
180 | ```json
181 | {
182 | "meta": {
183 | "worker": "master",
184 | "recorded_at": "2024-08-17T22:02:47.218492Z",
185 | "thread_name": "MainThread"
186 | },
187 | "name": "django_sql",
188 | "test_id": "test_django.py::test_case",
189 | "fixture_name": "test_django.teardown_fixture",
190 | "runtime": {
191 | "as_nanoseconds": 18375,
192 | "as_microseconds": 18,
193 | "as_iso": "PT0.000018S",
194 | "as_text": "18 microseconds"
195 | },
196 | "type": "django-sql",
197 | "sql_hash": "be0beb84a58eab3bdc1fc4214f90abe9e937e5cc7f54008e02ab81d51533bc16",
198 | "sql": "INSERT INTO \"django_app_dummymodel\" (\"foo\") VALUES (%s) RETURNING \"django_app_dummymodel\".\"id\""
199 | }
200 | ```
201 |
202 |
203 |
204 | ### Record additional functions
205 |
206 | Any arbitrary Python function can be captured by passing a comma-separated string of paths to
207 | `--scrutinize-func`:
208 |
209 | ```shell
210 | # Record all boto3 clients that are created, along with their timings:
211 | pytest --scrutinize=test-timings.jsonl.gz --scrutinize-func=botocore.session.Session.create_client
212 | ```
213 |
214 |
215 | Example
216 |
217 | ```json
218 | {
219 | "meta": {
220 | "worker": "gw0",
221 | "recorded_at": "2024-08-17T22:02:44.296938Z",
222 | "thread_name": "MainThread"
223 | },
224 | "name": "urllib.parse.parse_qs",
225 | "test_id": "test_mock.py::test_case",
226 | "fixture_name": "test_mock.teardown_fixture",
227 | "runtime": {
228 | "as_nanoseconds": 2916,
229 | "as_microseconds": 2,
230 | "as_iso": "PT0.000002S",
231 | "as_text": "2 microseconds"
232 | },
233 | "type": "mock"
234 | }
235 | ```
236 |
237 |
238 |
239 | ### Garbage collection
240 |
241 | Garbage collection events can be captured with the `--scrutinize-gc` flag. Every GC is captured,
242 | along with the total time and number of objects collected. This can be used to find tests that
243 | generate significant GC pressure by creating lots of circular-referenced objects:
244 |
245 | ```shell
246 | pytest --scrutinize=test-timings.jsonl.gz --scrutinize-gc
247 | ```
248 |
249 |
250 | Example
251 |
252 | ```json
253 | {
254 | "meta": {
255 | "worker": "gw0",
256 | "recorded_at": "2024-08-17T22:02:44.962665Z",
257 | "thread_name": "MainThread"
258 | },
259 | "type": "gc",
260 | "runtime": {
261 | "as_nanoseconds": 5404333,
262 | "as_microseconds": 5404,
263 | "as_iso": "PT0.005404S",
264 | "as_text": "5404 microseconds"
265 | },
266 | "collected_count": 279,
267 | "generation": 2
268 | }
269 | ```
270 |
271 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "pytest-scrutinize"
3 | version = "0.1.6"
4 | description = "Scrutinize your pytest test suites for slow fixtures, tests and more."
5 | authors = [
6 | { name = "Thomas Forbes", email = "tom@tomforb.es" }
7 | ]
8 | dependencies = [
9 | "pytest>=6",
10 | "pydantic>=2,<3",
11 | ]
12 | readme = "README.md"
13 | requires-python = ">= 3.9"
14 | keywords = ["pytest", "timing"]
15 | license = {text = "MIT License"}
16 | classifiers = [
17 | "License :: OSI Approved :: MIT License",
18 | "Development Status :: 4 - Beta",
19 | "Intended Audience :: Developers",
20 | "Programming Language :: Python :: 3",
21 | "Programming Language :: Python :: 3.9",
22 | "Programming Language :: Python :: 3.10",
23 | "Programming Language :: Python :: 3.11",
24 | "Programming Language :: Python :: 3.12",
25 | ]
26 |
27 | [project.urls]
28 | Homepage = "https://github.com/orf/pytest-scrutinize/"
29 | Repository = "https://github.com/orf/pytest-scrutinize.git"
30 | Issues = "https://github.com/orf/pytest-scrutinize/issues"
31 |
32 |
33 | [build-system]
34 | requires = ["hatchling"]
35 | build-backend = "hatchling.build"
36 |
37 | [project.entry-points.pytest11]
38 | pytest-scrutinize = "pytest_scrutinize.plugin"
39 |
40 | [tool.rye]
41 | managed = true
42 | universal = true
43 | generate-hashes = true
44 | dev-dependencies = [
45 | "ipython>=8.26.0",
46 | "ipdb>=0.13.13",
47 | "mypy>=1.11.1",
48 | "pytest-xdist>=3.6.1",
49 | "pyright>=1.1.376",
50 | "pytest-pretty>=1.2.0",
51 | "devtools>=0.12.2",
52 | "django>=5.1",
53 | "pytest-django>=4.8.0",
54 | ]
55 |
56 | [tool.hatch.metadata]
57 | allow-direct-references = true
58 |
59 | [tool.pytest.ini_options]
60 | addopts = """
61 | --runpytest=subprocess
62 | --scrutinize=test-timings.jsonl.gz
63 | --scrutinize-gc
64 | """
65 | pytester_example_dir = "tests/examples"
66 | norecursedirs = "tests/examples"
67 |
68 | [tool.mypy]
69 | plugins = [
70 | "pydantic.mypy"
71 | ]
72 |
73 | [[tool.mypy.overrides]]
74 | module = [
75 | "xdist.workermanage",
76 | ]
77 | ignore_missing_imports = true
78 |
--------------------------------------------------------------------------------
/pytest_scrutinize/__init__.py:
--------------------------------------------------------------------------------
1 | import typing
2 |
3 | import pydantic
4 | from typing import Union
5 | from .data import (
6 | GCTiming,
7 | CollectionTiming,
8 | WorkerTiming,
9 | MockTiming,
10 | TestTiming,
11 | FixtureTiming,
12 | DjangoSQLTiming,
13 | )
14 |
# Discriminated union of every timing event the plugin can emit. Pydantic
# dispatches on each model's `type` literal field when parsing a line from
# the JSONL output file.
Timing = typing.Annotated[
    Union[
        GCTiming,
        CollectionTiming,
        WorkerTiming,
        MockTiming,
        TestTiming,
        FixtureTiming,
        DjangoSQLTiming,
    ],
    pydantic.Field(discriminator="type"),
]

# Reusable adapter for validating/serializing `Timing` values, e.g. when
# reading the timings file back in.
TimingAdapter = pydantic.TypeAdapter(Timing)
29 |
--------------------------------------------------------------------------------
/pytest_scrutinize/data.py:
--------------------------------------------------------------------------------
1 | import abc
2 | import threading
3 | from datetime import datetime
4 | from typing import Literal
5 |
6 | import pydantic
7 | from pydantic import computed_field
8 | from pydantic.fields import Field
9 |
10 | from .timer import now, Duration
11 |
12 |
def get_worker_field_default() -> str:
    """Return the current xdist worker id ("master" outside xdist).

    Used as the default factory for ``Meta.worker``.
    """
    # Work around circular imports. to-do: refactor this
    from .plugin_xdist import get_worker_id

    return get_worker_id()
18 |
19 |
class Meta(pydantic.BaseModel):
    """Context attached to every timing event: worker, wall-clock time, thread."""

    # xdist worker id, or "master" when not running under xdist.
    worker: str = Field(default_factory=get_worker_field_default)
    # Wall-clock time the event was recorded.
    recorded_at: datetime = Field(default_factory=now)
    # Name of the Python thread that recorded the event.
    thread_name: str = Field(default_factory=lambda: threading.current_thread().name)
24 |
25 |
class BaseTiming(pydantic.BaseModel, abc.ABC):
    """Common base for all timing events; attaches the `meta` context block."""

    meta: Meta = Field(default_factory=Meta)
28 |
29 |
class GCTiming(BaseTiming):
    """One garbage-collection run (recorded with --scrutinize-gc)."""

    type: Literal["gc"] = "gc"

    runtime: Duration
    # Number of objects collected by this run.
    collected_count: int
    # GC generation that was collected.
    generation: int
36 |
37 |
class CollectionTiming(BaseTiming):
    """Time spent in pytest's collection phase."""

    type: Literal["collection"] = "collection"

    runtime: Duration
42 |
43 |
class WorkerTiming(BaseTiming):
    """Lifecycle timings of a pytest-xdist worker process."""

    type: Literal["worker"] = "worker"

    # Time until the worker reported ready.
    ready: Duration
    # Total worker runtime; None when no total was recorded.
    runtime: Duration | None = None
49 |
50 |
class BaseMockTiming(BaseTiming, abc.ABC):
    """Base for events produced by patched ("mocked") functions."""

    # Recorder name (dotted function path, or "django_sql").
    name: str
    # Test node id the call happened in, if any.
    test_id: str | None
    # Fully qualified fixture name the call happened in, if any.
    fixture_name: str | None

    runtime: Duration
57 |
58 |
class MockTiming(BaseMockTiming):
    """One timed call to a function recorded via --scrutinize-func."""

    type: Literal["mock"] = "mock"
61 |
62 |
class DjangoSQLTiming(BaseMockTiming):
    """One Django SQL query (recorded via --scrutinize-django-sql)."""

    type: Literal["django-sql"] = "django-sql"
    # SHA-256 of the query text; lets consumers count duplicate queries.
    sql_hash: str
    # Raw SQL text; only populated with --scrutinize-django-sql=query.
    sql: str | None
67 |
68 |
class TestTiming(BaseTiming):
    """Runtime of a single test function body (fixture time excluded)."""

    type: Literal["test"] = "test"

    name: str
    # Full pytest node id, e.g. "tests/test_x.py::test_y".
    test_id: str
    # Names of the fixtures the test requested.
    requires: list[str]

    runtime: Duration
77 |
78 |
class FixtureTiming(BaseTiming):
    """Setup and (for generator fixtures) teardown timing of one fixture."""

    type: Literal["fixture"] = "fixture"

    # Fully qualified "module.qualname" of the fixture function.
    name: str
    # Just the function's qualname.
    short_name: str
    # Owning test node id; None for non-function-scoped fixtures.
    test_id: str | None
    # Pytest fixture scope ("function", "module", ...).
    scope: str

    setup: Duration
    # None when no teardown phase was recorded (plain, non-generator fixture).
    teardown: Duration | None

    @computed_field  # type: ignore[prop-decorator]
    @property
    def runtime(self) -> Duration:
        # Total time attributed to the fixture: setup plus teardown if any.
        if self.teardown is not None:
            return self.setup + self.teardown
        return self.setup
96 |
--------------------------------------------------------------------------------
/pytest_scrutinize/io.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 | import gzip
3 | import typing
4 | from dataclasses import dataclass, field
5 | from pathlib import Path
6 |
7 | if typing.TYPE_CHECKING:
8 | from pytest_scrutinize.data import BaseTiming
9 |
10 |
@dataclass
class TimingsOutputFile:
    """Buffered writer/reader for the gzipped JSON-lines timings file.

    Timings are appended to an in-memory buffer and only serialized to the
    gzip stream when ``flush_buffer`` is called.
    """

    # Destination path of the gzipped JSONL file.
    path: Path
    # Timings waiting to be serialized.
    buffer: list["BaseTiming"] = field(default_factory=list)

    # Open text handle while `initialize_writer` is active, else None.
    # Spelled with typing.Optional (not `X | None`) because this annotation is
    # evaluated at class-creation time and PEP 604 unions require Python 3.10,
    # while the package declares support for 3.9.
    fd: typing.Optional[typing.TextIO] = None

    def add_timing(self, timing: "BaseTiming"):
        """Queue a timing event for the next flush."""
        self.buffer.append(timing)

    def flush_buffer(self):
        """Serialize all buffered timings to the open gzip stream.

        Raises RuntimeError if the writer has not been initialized.
        """
        if self.fd is None:
            raise RuntimeError("Output file not opened")
        # Get a reference to the buffer, then replace it with an
        # empty list. We do this because the GC callbacks _could_
        # cause an append to the list mid-iteration, or after the
        # write loop has finished.
        buffer = self.buffer
        self.buffer = []
        for timing in buffer:
            self.fd.write(timing.model_dump_json())
            self.fd.write("\n")

    @contextlib.contextmanager
    def initialize_writer(self) -> typing.Generator[typing.TextIO, None, None]:
        """Open ``path`` for writing for the duration of the context.

        Any timings still buffered when the context exits are flushed before
        the file is closed.
        """
        if self.fd is not None:
            raise RuntimeError("Output file already opened")

        # Pin the encoding: gzip.open in text mode otherwise falls back to
        # the locale's preferred encoding, making the output file
        # platform-dependent.
        with gzip.open(self.path, mode="wt", encoding="utf-8", compresslevel=6) as fd:
            self.fd = fd
            try:
                yield fd
            finally:
                self.flush_buffer()
                self.fd = None

    @contextlib.contextmanager
    def get_reader(self) -> typing.Generator[typing.TextIO, None, None]:
        """Open ``path`` for reading; only valid once the writer is closed."""
        if self.fd is not None:
            raise RuntimeError("Output file not closed")

        with gzip.open(self.path, mode="rt", encoding="utf-8") as fd:
            yield fd
54 |
--------------------------------------------------------------------------------
/pytest_scrutinize/mocks.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 | import pkgutil
3 | from dataclasses import dataclass, field
4 | from typing import Any, Callable, Self, Literal
5 | from unittest import mock
6 | import hashlib
7 | import pydantic
8 |
9 | from pytest_scrutinize.io import TimingsOutputFile
10 | from pytest_scrutinize.timer import measure_time, Duration
11 | from pytest_scrutinize.data import MockTiming, DjangoSQLTiming, BaseMockTiming
12 |
13 |
class SingleMockRecorder(pydantic.BaseModel):
    """Wraps one function in a `mock.patch` that times every call.

    The patch's ``side_effect`` is swapped in and out around each test or
    fixture so calls are timed and attributed to the running context.
    """

    # Human-readable name recorded in the timing events.
    name: str
    # The (not yet started) `mock.patch` object.
    mocked: Any
    # The real function; called from inside the mock's side_effect.
    original_callable: Callable

    @classmethod
    def from_dotted_path(cls, name: str, mock_path: str, **kwargs) -> Self:
        """Build a recorder for a dotted path like "pkg.module.Class.attr"."""
        class_path, attribute_name = mock_path.rsplit(".", 1)
        original_object = pkgutil.resolve_name(class_path)
        original_callable = getattr(original_object, attribute_name)
        # autospec preserves the original signature; side_effect stays None
        # until record_mock() activates the recorder.
        mocked = mock.patch(mock_path, side_effect=None, autospec=True)
        return cls(
            name=name, mocked=mocked, original_callable=original_callable, **kwargs
        )

    def record_timing(
        self,
        fixture_name: str | None,
        elapsed: Duration,
        test_id: str | None,
        *,
        args: tuple[Any, ...],
        kwargs: dict[str, Any],
    ) -> BaseMockTiming:
        """Build the timing event for one call.

        Subclasses may inspect args/kwargs of the call (see
        DjangoSQLRecorder); the base implementation ignores them.
        """
        return MockTiming(
            fixture_name=fixture_name,
            runtime=elapsed,
            name=self.name,
            test_id=test_id,
        )

    @contextlib.contextmanager
    def record_mock(
        self, output: TimingsOutputFile, test_id: str | None, fixture_name: str | None
    ):
        """Activate the patch for the context's duration, writing one timing
        event per call to ``output``."""
        # A non-None side_effect means the recorder is already active.
        if self.mocked.kwargs["side_effect"] is not None:
            raise RuntimeError(f"Recursive mock call for mock {self}")

        def wrapped(*args, **kwargs):
            # Time the real callable, then record the elapsed duration.
            with measure_time() as timer:
                result = self.original_callable(*args, **kwargs)
            output.add_timing(
                self.record_timing(
                    fixture_name, timer.elapsed, test_id, args=args, kwargs=kwargs
                )
            )
            return result

        self.mocked.kwargs["side_effect"] = wrapped
        try:
            with self.mocked:
                yield
        finally:
            # Deactivate so a later record_mock() call doesn't look recursive.
            self.mocked.kwargs["side_effect"] = None
68 |
69 |
class DjangoSQLRecorder(SingleMockRecorder):
    """Records Django SQL queries by patching CursorWrapper._execute."""

    # True: record only the query hash; "query": also record the raw SQL.
    mode: Literal[True, "query"]

    def record_timing(
        self,
        fixture_name: str | None,
        elapsed: Duration,
        test_id: str | None,
        *,
        args: tuple[Any, ...],
        kwargs: dict[str, Any],
    ) -> DjangoSQLTiming:
        """Build a DjangoSQLTiming for one executed query."""
        # The django.db.backends.utils.CursorWrapper._execute function takes the
        # SQL as the second argument (the first being `self`):
        query: str | bytes = args[1]
        query_str = query
        # Normalize: hash operates on bytes, the event stores str.
        if isinstance(query, str):
            query = query.encode()
        else:
            query_str = query.decode()
        sql_hash = hashlib.sha256(query, usedforsecurity=False).hexdigest()

        sql = None
        if self.mode == "query":
            # Include the full query here
            sql = query_str
        return DjangoSQLTiming(
            fixture_name=fixture_name,
            runtime=elapsed,
            name=self.name,
            test_id=test_id,
            sql_hash=sql_hash,
            sql=sql,
        )
104 |
105 |
@dataclass
class MockRecorder:
    """Manages the full set of function recorders for a test run."""

    # Dotted paths requested via --scrutinize-func.
    mocks: frozenset[str]
    output: TimingsOutputFile
    # True: hash-only SQL recording; "query": include raw SQL; None: disabled.
    enable_django_sql: Literal[True, "query"] | None

    # Active recorders, keyed by dotted path (or "django_sql").
    _mock_funcs: dict[str, SingleMockRecorder] = field(default_factory=dict)

    @contextlib.contextmanager
    def record(self, test_id: str | None, fixture_name: str | None):
        """Activate every recorder, attributing calls to test_id/fixture_name."""
        # We want to avoid recursive mock calls, which can happen when using `getfixturevalue`.
        # Detecting if something is a mock is a pain, so we just use a token-style system:
        # we take the entire _mock_funcs dictionary when we set up the mocks, and replace it
        # when we are done.

        if not self._mock_funcs:
            yield
            return

        mock_funcs = self._mock_funcs
        self._mock_funcs = {}

        try:
            with contextlib.ExitStack() as stack:
                for single_mock in mock_funcs.values():
                    stack.enter_context(
                        single_mock.record_mock(self.output, test_id, fixture_name)
                    )
                yield
        finally:
            # Hand the recorders back so the next record() call can use them.
            self._mock_funcs = mock_funcs

    @contextlib.contextmanager
    def initialize_mocks(self):
        """Resolve the configured dotted paths into recorders for the session."""
        for mock_path in self.mocks:
            self._mock_funcs[mock_path] = SingleMockRecorder.from_dotted_path(
                name=mock_path, mock_path=mock_path
            )

        if self.enable_django_sql is not None:
            # Patching CursorWrapper._execute captures Django queries.
            self._mock_funcs["django_sql"] = DjangoSQLRecorder.from_dotted_path(
                name="django_sql",
                mock_path="django.db.backends.utils.CursorWrapper._execute",
                mode=self.enable_django_sql,
            )

        try:
            yield
        finally:
            self._mock_funcs.clear()
156 |
--------------------------------------------------------------------------------
/pytest_scrutinize/plugin.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 | import gc
3 | import shutil
4 | import tempfile
5 | import typing
6 | from pathlib import Path
7 | from typing import Literal
8 |
9 | import pydantic
10 | import pytest
11 |
12 | from .io import TimingsOutputFile
13 | from .mocks import MockRecorder
14 | from .data import (
15 | CollectionTiming,
16 | FixtureTiming,
17 | TestTiming,
18 | GCTiming,
19 | )
20 | from .utils import is_generator_fixture
21 | from .timer import Timer, measure_time
22 |
23 | if typing.TYPE_CHECKING:
24 | from _pytest.fixtures import FixtureDef, SubRequest
25 |
26 |
@pytest.hookimpl
def pytest_addoption(parser: pytest.Parser):
    """Register the --scrutinize* command-line options."""
    group = parser.getgroup("scrutinize", "Structured timing output")
    group.addoption(
        "--scrutinize",
        metavar="N",
        action="store",
        type=Path,
        help="Store structured timing output to this file",
    )
    group.addoption(
        "--scrutinize-func",
        action="append",
        type=str,
        nargs="?",
        help="Comma separated list of functions to record",
    )
    group.addoption(
        "--scrutinize-gc", action="store_true", help="Record garbage collections"
    )
    group.addoption(
        "--scrutinize-django-sql",
        # Bare flag -> const=True (hash-only mode); an explicit value must be
        # "hash" or "query" ("query" additionally stores the raw SQL).
        nargs="?",
        choices=["hash", "query"],
        default=False,
        const=True,
        help="Record Django SQL queries",
    )
55 |
56 |
class Config(pydantic.BaseModel):
    """Validated plugin configuration derived from the CLI options."""

    # Final destination of the gzipped JSONL timings file.
    output_path: Path
    # Dotted paths of extra functions to patch and time.
    mocks: frozenset[str]
    # Record garbage-collection events?
    enable_gc: bool
    # True: record SQL hashes only; "query": also record raw SQL; None: off.
    enable_django_sql: Literal[True, "query"] | None
62 |
63 |
64 | def pytest_configure(config: pytest.Config):
65 | if output_path := config.getoption("--scrutinize"):
66 | assert isinstance(output_path, Path)
67 |
68 | enable_gc = typing.cast(bool, config.getoption("--scrutinize-gc") or False)
69 |
70 | enable_django_sql = typing.cast(
71 | Literal[True, "query"] | None,
72 | config.getoption("--scrutinize-django-sql") or None,
73 | )
74 |
75 | mocks = typing.cast(list[str], config.getoption("--scrutinize-func"))
76 | if mocks is None:
77 | mocks = frozenset()
78 | else:
79 | mocks = {
80 | stripped_mock
81 | for mocks_arg in mocks
82 | for mock_path in mocks_arg.split(",")
83 | if (stripped_mock := mock_path.strip())
84 | }
85 | plugin_config = Config(
86 | output_path=output_path,
87 | mocks=frozenset(mocks),
88 | enable_gc=enable_gc,
89 | enable_django_sql=enable_django_sql,
90 | )
91 |
92 | plugin_cls: type[DetailedTimingsPlugin]
93 | has_xdist = config.pluginmanager.get_plugin("xdist") is not None
94 |
95 | from . import plugin_xdist
96 |
97 | is_xdist_master = plugin_xdist.is_master()
98 | match (has_xdist, is_xdist_master):
99 | case (True, True):
100 | plugin_cls = plugin_xdist.XDistMasterDetailedTimingsPlugin
101 | case (True, False):
102 | plugin_cls = plugin_xdist.XDistWorkerDetailedTimingsPlugin
103 | case (False, _):
104 | plugin_cls = DetailedTimingsPlugin
105 | case _:
106 | assert False, f"unreachable: {has_xdist=} {is_xdist_master=}"
107 |
108 | plugin = plugin_cls(plugin_config)
109 | config.pluginmanager.register(plugin, name=__name__)
110 |
111 |
@pytest.hookimpl(hookwrapper=True)
def pytest_runtestloop(session: pytest.Session):
    """Wrap the whole test loop in the plugin's writer/mock context."""
    # The plugin is only registered when --scrutinize was passed.
    if plugin := session.config.pluginmanager.get_plugin(__name__):
        with plugin.run(session):
            yield
    else:
        yield
119 |
120 |
class DetailedTimingsPlugin:
    """Core plugin: collects timing events and writes them to a gzip JSONL file.

    Handles the single-process case; the xdist master/worker plugins subclass
    this to merge per-worker outputs.
    """

    config: Config
    output: TimingsOutputFile
    mock_recorder: MockRecorder

    def __init__(self, config: Config):
        self.config = config

        # Write to a temporary file first; it is moved to the final
        # destination when the run finishes (see create_final_output_file).
        temp_dir = Path(tempfile.mkdtemp())
        temp_output_path = temp_dir / "output.jsonl.gz"
        self.output = TimingsOutputFile(temp_output_path)
        self.mock_recorder = MockRecorder(
            mocks=config.mocks,
            output=self.output,
            enable_django_sql=self.config.enable_django_sql,
        )

        if config.enable_gc:
            self.setup_gc_callbacks()

    @contextlib.contextmanager
    def run(self, session: pytest.Session) -> typing.Generator[typing.Self, None, None]:
        """Open the output writer and install the mocks for the test loop,
        then finalize the output file."""
        with self.output.initialize_writer(), self.mock_recorder.initialize_mocks():
            yield self

        self.create_final_output_file(session)

    def create_final_output_file(self, session: pytest.Session):
        # Move the temporary file to the user-supplied path. Overridden by
        # the xdist plugins.
        shutil.move(src=self.output.path, dst=self.config.output_path)

    @pytest.hookimpl(hookwrapper=True)
    def pytest_collection(self, session: pytest.Session):
        """Record how long pytest's collection phase took."""
        with measure_time() as timer:
            yield

        self.output.add_timing(CollectionTiming(runtime=timer.elapsed))

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_protocol(self, item: pytest.Item, nextitem: pytest.Item | None):
        # Flush buffered timings to disk after every test, even on failure.
        try:
            yield
        finally:
            self.output.flush_buffer()

    @pytest.hookimpl(hookwrapper=True)
    def pytest_pyfunc_call(self, pyfuncitem: pytest.Function):
        """Time the test function body itself (fixture time excluded)."""
        with self.mock_recorder.record(test_id=pyfuncitem.nodeid, fixture_name=None):
            with measure_time() as timer:
                yield

        test_timing = TestTiming(
            name=pyfuncitem.name,
            test_id=pyfuncitem.nodeid,
            requires=pyfuncitem.fixturenames,
            runtime=timer.elapsed,
        )

        self.output.add_timing(test_timing)

    @pytest.hookimpl(hookwrapper=True)
    def pytest_fixture_setup(self, fixturedef: "FixtureDef", request: "SubRequest"):
        """Time a fixture's setup and, for generator fixtures, its teardown."""
        is_function_scope = fixturedef.scope == "function"
        full_name = f"{fixturedef.func.__module__}.{fixturedef.func.__qualname__}"

        # Don't associate non-function scoped fixtures with a given test
        test_id = request.node.nodeid if is_function_scope else None

        setup_timer: Timer
        teardown_timer: Timer | None = None

        def fixture_done():
            # Registered first, so it runs last: emit the FixtureTiming once
            # setup (and possibly teardown) have been measured.
            nonlocal setup_timer, teardown_timer
            teardown_duration_ns = (
                teardown_timer.elapsed if teardown_timer is not None else None
            )

            self.output.add_timing(
                FixtureTiming(
                    name=full_name,
                    short_name=fixturedef.func.__qualname__,
                    test_id=test_id,
                    scope=request.scope,
                    setup=setup_timer.elapsed,
                    teardown=teardown_duration_ns,
                )
            )

        fixturedef.addfinalizer(fixture_done)

        if not is_generator_fixture(fixturedef.func):
            # Plain fixture: only setup time exists.
            with self.mock_recorder.record(test_id=test_id, fixture_name=full_name):
                with measure_time() as setup_timer:
                    yield
        else:
            # We want to capture the teardown times for fixtures. This is non-trivial as
            # pytest exposes no hooks to allow you to do this.
            # However, we can use finalizers for this: we first attach a finalizer _before_
            # the fixture is executed, then attach a finalizer _after_ the fixture is executed.
            # Finalizers run in reverse registration order: `teardown_fixture_start`
            # (registered after the fixture ran) fires first, then the fixture's own
            # teardown, and `teardown_fixture_finish` (registered before) fires last.

            teardown_mock_capture: typing.ContextManager | None = None
            teardown_timer = Timer()

            def teardown_fixture_start():
                # Begin timing / mock capture just before the fixture's teardown.
                nonlocal teardown_mock_capture
                teardown_mock_capture = self.mock_recorder.record(
                    test_id=test_id, fixture_name=full_name
                )
                teardown_mock_capture.__enter__()
                teardown_timer.__enter__()

            def teardown_fixture_finish():
                # Stop timing / mock capture once the fixture's teardown ran.
                teardown_timer.__exit__(None, None, None)
                if teardown_mock_capture is not None:
                    teardown_mock_capture.__exit__(None, None, None)

            fixturedef.addfinalizer(teardown_fixture_finish)
            with self.mock_recorder.record(test_id=test_id, fixture_name=full_name):
                with measure_time() as setup_timer:
                    yield

            fixturedef.addfinalizer(teardown_fixture_start)

    def setup_gc_callbacks(self):
        """Install a gc callback that records every collection as a GCTiming."""
        gc_timer = Timer()

        def gc_callback(phase: Literal["start", "stop"], info: dict[str, int]):
            if phase == "start":
                gc_timer.start()
            else:
                gc_timer.stop()

                self.output.add_timing(
                    GCTiming(
                        runtime=gc_timer.elapsed,
                        collected_count=info["collected"],
                        generation=info["generation"],
                    )
                )

        gc.callbacks.append(gc_callback)
263 |
--------------------------------------------------------------------------------
/pytest_scrutinize/plugin_xdist.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | import typing
4 |
5 | from pathlib import Path
6 | from typing import Any, Sequence
7 |
8 | import pytest
9 |
10 | from pytest_scrutinize.plugin import DetailedTimingsPlugin
11 | from pytest_scrutinize.data import WorkerTiming, Meta
12 |
13 | from pytest_scrutinize.timer import Timer
14 | from .io import TimingsOutputFile
15 |
16 | if typing.TYPE_CHECKING:
17 | from .plugin import Config
18 |
19 | try:
20 | from xdist.workermanage import WorkerController
21 | except ImportError:
22 | pass
23 |
24 |
25 | def get_worker_id() -> str:
26 | return os.environ.get("PYTEST_XDIST_WORKER", "master")
27 |
28 |
29 | def is_master() -> bool:
30 | return get_worker_id() == "master"
31 |
32 |
33 | _worker_output_key = f"{__name__}.output"
34 |
35 |
36 | class XDistWorkerDetailedTimingsPlugin(DetailedTimingsPlugin):
37 | @pytest.hookimpl()
38 | def pytest_sessionfinish(self, session: pytest.Session, exitstatus: int):
39 | if workeroutput := getattr(session.config, "workeroutput", None):
40 | workeroutput[_worker_output_key] = str(self.output.path.absolute())
41 |
42 | def create_final_output_file(self, session: pytest.Session):
43 | return
44 |
45 |
46 | class XDistMasterDetailedTimingsPlugin(DetailedTimingsPlugin):
47 | setup_nodes_timer: Timer
48 | worker_timings: dict[str, WorkerTiming]
49 | worker_output_files: list[Path]
50 |
51 | def __init__(self, config: "Config"):
52 | super().__init__(config=config)
53 |
54 | self.setup_nodes_timer = Timer()
55 | self.worker_timings = {}
56 | self.worker_output_files = []
57 |
58 | @pytest.hookimpl()
59 | def pytest_xdist_setupnodes(self, config: pytest.Config, specs: Sequence[Any]):
60 | self.setup_nodes_timer.start()
61 |
62 | @pytest.hookimpl()
63 | def pytest_testnodeready(self, node: "WorkerController"):
64 | duration = self.setup_nodes_timer.elapsed
65 |
66 | worker_id = node.workerinfo["id"]
67 | self.worker_timings[worker_id] = WorkerTiming(
68 | meta=Meta(worker=worker_id),
69 | ready=duration,
70 | )
71 |
72 | def create_final_output_file(self, session: pytest.Session):
73 | final_output_file = TimingsOutputFile(path=self.config.output_path)
74 | files_to_combine = [self.output.path] + self.worker_output_files
75 |
76 | with final_output_file.initialize_writer() as output_writer:
77 | for input_path in files_to_combine:
78 | with TimingsOutputFile(path=input_path).get_reader() as output_reader:
79 | shutil.copyfileobj(fsrc=output_reader, fdst=output_writer)
80 |
81 | def pytest_testnodedown(self, node: "WorkerController", error: Any):
82 | if workeroutput := getattr(node, "workeroutput", None):
83 | worker_id = node.workerinfo["id"]
84 | if worker_timing := self.worker_timings.get(worker_id, None):
85 | # time since setup nodes was invoked
86 | worker_timing.runtime = self.setup_nodes_timer.elapsed
87 | self.output.add_timing(worker_timing)
88 |
89 | if output_path := workeroutput.get(_worker_output_key, None):
90 | self.worker_output_files.append(Path(output_path))
91 |
--------------------------------------------------------------------------------
/pytest_scrutinize/timer.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 | import dataclasses
3 | import time
4 | from datetime import datetime, UTC, timedelta
5 | from typing import Generator
6 |
7 | import pydantic
8 | from pydantic import computed_field
9 | from types import SimpleNamespace
10 |
11 | # Freezegun (and likely other similar libraries) does some insane stuff to try and ensure that
12 | # time.* and datetime.* functions are patched - it scans every module and looks for any module
13 | # level attribute that is a function it is patching, then patches it.
14 | # We store the original functions inside a class so that it is not replaced under our feet.
15 |
16 | _time_funcs = SimpleNamespace(perf_ns=time.perf_counter_ns, now=datetime.now)
17 |
18 |
19 | def now() -> datetime:
20 | return _time_funcs.now(tz=UTC)
21 |
22 |
23 | @contextlib.contextmanager
24 | def measure_time() -> Generator["Timer", None, None]:
25 | with Timer() as timer:
26 | yield timer
27 |
28 |
29 | @dataclasses.dataclass
30 | class Timer:
31 | _start: int | None = None
32 | _end: int | None = None
33 |
34 | @property
35 | def elapsed(self) -> "Duration":
36 | if self._start is None:
37 | raise RuntimeError("Timer not started")
38 |
39 | end = _time_funcs.perf_ns() if self._end is None else self._end
40 | return Duration(as_nanoseconds=end - self._start)
41 |
42 | def start(self):
43 | self.reset()
44 | self._start = _time_funcs.perf_ns()
45 |
46 | def stop(self):
47 | self._end = _time_funcs.perf_ns()
48 |
49 | def reset(self):
50 | self._start, self._end = None, None
51 |
52 | def __enter__(self) -> "Timer":
53 | self.start()
54 | return self
55 |
56 | def __exit__(self, exc_type, exc_val, exc_tb):
57 | self.stop()
58 |
59 |
60 | class Duration(pydantic.BaseModel):
61 | as_nanoseconds: int
62 |
63 | @computed_field # type: ignore[prop-decorator]
64 | @property
65 | def as_microseconds(self) -> int:
66 | return self.as_nanoseconds // 1_000
67 |
68 | @computed_field # type: ignore[prop-decorator]
69 | @property
70 | def as_iso(self) -> timedelta:
71 | return timedelta(microseconds=self.as_microseconds)
72 |
73 | @computed_field # type: ignore[prop-decorator]
74 | @property
75 | def as_text(self) -> str:
76 | return f"{self.as_microseconds} microseconds"
77 |
78 | def __add__(self, other: "Duration") -> "Duration":
79 | return Duration(as_nanoseconds=self.as_nanoseconds + other.as_nanoseconds)
80 |
--------------------------------------------------------------------------------
/pytest_scrutinize/utils.py:
--------------------------------------------------------------------------------
1 | import inspect
2 |
3 |
4 | def is_generator_fixture(func):
5 | genfunc = inspect.isgeneratorfunction(func)
6 | return genfunc and not inspect.iscoroutinefunction(func)
7 |
--------------------------------------------------------------------------------
/requirements-dev.lock:
--------------------------------------------------------------------------------
1 | # generated by rye
2 | # use `rye lock` or `rye sync` to update this lockfile
3 | #
4 | # last locked with the following flags:
5 | # pre: false
6 | # features: []
7 | # all-features: true
8 | # with-sources: false
9 | # generate-hashes: true
10 | # universal: true
11 |
12 | -e file:.
13 | annotated-types==0.7.0 \
14 | --hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \
15 | --hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89
16 | # via pydantic
17 | asgiref==3.8.1 \
18 | --hash=sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47 \
19 | --hash=sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590
20 | # via django
21 | asttokens==2.4.1 \
22 | --hash=sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24 \
23 | --hash=sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0
24 | # via devtools
25 | # via stack-data
26 | colorama==0.4.6 ; sys_platform == 'win32' \
27 | --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
28 | --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
29 | # via ipython
30 | # via pytest
31 | decorator==5.1.1 \
32 | --hash=sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330 \
33 | --hash=sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186
34 | # via ipdb
35 | # via ipython
36 | devtools==0.12.2 \
37 | --hash=sha256:c366e3de1df4cdd635f1ad8cbcd3af01a384d7abda71900e68d43b04eb6aaca7 \
38 | --hash=sha256:efceab184cb35e3a11fa8e602cc4fadacaa2e859e920fc6f87bf130b69885507
39 | django==5.1 \
40 | --hash=sha256:848a5980e8efb76eea70872fb0e4bc5e371619c70fffbe48e3e1b50b2c09455d \
41 | --hash=sha256:d3b811bf5371a26def053d7ee42a9df1267ef7622323fe70a601936725aa4557
42 | execnet==2.1.1 \
43 | --hash=sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc \
44 | --hash=sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3
45 | # via pytest-xdist
46 | executing==2.0.1 \
47 | --hash=sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147 \
48 | --hash=sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc
49 | # via devtools
50 | # via stack-data
51 | iniconfig==2.0.0 \
52 | --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \
53 | --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374
54 | # via pytest
55 | ipdb==0.13.13 \
56 | --hash=sha256:45529994741c4ab6d2388bfa5d7b725c2cf7fe9deffabdb8a6113aa5ed449ed4 \
57 | --hash=sha256:e3ac6018ef05126d442af680aad863006ec19d02290561ac88b8b1c0b0cfc726
58 | ipython==8.26.0 \
59 | --hash=sha256:1cec0fbba8404af13facebe83d04436a7434c7400e59f47acf467c64abd0956c \
60 | --hash=sha256:e6b347c27bdf9c32ee9d31ae85defc525755a1869f14057e900675b9e8d6e6ff
61 | # via ipdb
62 | jedi==0.19.1 \
63 | --hash=sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd \
64 | --hash=sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0
65 | # via ipython
66 | markdown-it-py==3.0.0 \
67 | --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \
68 | --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb
69 | # via rich
70 | matplotlib-inline==0.1.7 \
71 | --hash=sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90 \
72 | --hash=sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca
73 | # via ipython
74 | mdurl==0.1.2 \
75 | --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \
76 | --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba
77 | # via markdown-it-py
78 | mypy==1.11.1 \
79 | --hash=sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54 \
80 | --hash=sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a \
81 | --hash=sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72 \
82 | --hash=sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69 \
83 | --hash=sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b \
84 | --hash=sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe \
85 | --hash=sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4 \
86 | --hash=sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd \
87 | --hash=sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0 \
88 | --hash=sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525 \
89 | --hash=sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2 \
90 | --hash=sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c \
91 | --hash=sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5 \
92 | --hash=sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de \
93 | --hash=sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74 \
94 | --hash=sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c \
95 | --hash=sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e \
96 | --hash=sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58 \
97 | --hash=sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b \
98 | --hash=sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417 \
99 | --hash=sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411 \
100 | --hash=sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb \
101 | --hash=sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03 \
102 | --hash=sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca \
103 | --hash=sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8 \
104 | --hash=sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08 \
105 | --hash=sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809
106 | mypy-extensions==1.0.0 \
107 | --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \
108 | --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782
109 | # via mypy
110 | nodeenv==1.9.1 \
111 | --hash=sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f \
112 | --hash=sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9
113 | # via pyright
114 | packaging==24.1 \
115 | --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \
116 | --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124
117 | # via pytest
118 | parso==0.8.4 \
119 | --hash=sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18 \
120 | --hash=sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d
121 | # via jedi
122 | pexpect==4.9.0 ; sys_platform != 'emscripten' and sys_platform != 'win32' \
123 | --hash=sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523 \
124 | --hash=sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f
125 | # via ipython
126 | pluggy==1.5.0 \
127 | --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \
128 | --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669
129 | # via pytest
130 | prompt-toolkit==3.0.47 \
131 | --hash=sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10 \
132 | --hash=sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360
133 | # via ipython
134 | ptyprocess==0.7.0 ; sys_platform != 'emscripten' and sys_platform != 'win32' \
135 | --hash=sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35 \
136 | --hash=sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220
137 | # via pexpect
138 | pure-eval==0.2.3 \
139 | --hash=sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0 \
140 | --hash=sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42
141 | # via stack-data
142 | pydantic==2.8.2 \
143 | --hash=sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a \
144 | --hash=sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8
145 | # via pytest-scrutinize
146 | pydantic-core==2.20.1 \
147 | --hash=sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d \
148 | --hash=sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f \
149 | --hash=sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686 \
150 | --hash=sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482 \
151 | --hash=sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006 \
152 | --hash=sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83 \
153 | --hash=sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6 \
154 | --hash=sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88 \
155 | --hash=sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86 \
156 | --hash=sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a \
157 | --hash=sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6 \
158 | --hash=sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a \
159 | --hash=sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6 \
160 | --hash=sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6 \
161 | --hash=sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43 \
162 | --hash=sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c \
163 | --hash=sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4 \
164 | --hash=sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e \
165 | --hash=sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203 \
166 | --hash=sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd \
167 | --hash=sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1 \
168 | --hash=sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24 \
169 | --hash=sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc \
170 | --hash=sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc \
171 | --hash=sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3 \
172 | --hash=sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598 \
173 | --hash=sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98 \
174 | --hash=sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331 \
175 | --hash=sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2 \
176 | --hash=sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a \
177 | --hash=sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6 \
178 | --hash=sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688 \
179 | --hash=sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91 \
180 | --hash=sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa \
181 | --hash=sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b \
182 | --hash=sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0 \
183 | --hash=sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840 \
184 | --hash=sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c \
185 | --hash=sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd \
186 | --hash=sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3 \
187 | --hash=sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231 \
188 | --hash=sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1 \
189 | --hash=sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953 \
190 | --hash=sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250 \
191 | --hash=sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a \
192 | --hash=sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2 \
193 | --hash=sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20 \
194 | --hash=sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434 \
195 | --hash=sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab \
196 | --hash=sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703 \
197 | --hash=sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a \
198 | --hash=sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2 \
199 | --hash=sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac \
200 | --hash=sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611 \
201 | --hash=sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121 \
202 | --hash=sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e \
203 | --hash=sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b \
204 | --hash=sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09 \
205 | --hash=sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906 \
206 | --hash=sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9 \
207 | --hash=sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7 \
208 | --hash=sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b \
209 | --hash=sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987 \
210 | --hash=sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c \
211 | --hash=sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b \
212 | --hash=sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e \
213 | --hash=sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237 \
214 | --hash=sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1 \
215 | --hash=sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19 \
216 | --hash=sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b \
217 | --hash=sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad \
218 | --hash=sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0 \
219 | --hash=sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94 \
220 | --hash=sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312 \
221 | --hash=sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f \
222 | --hash=sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669 \
223 | --hash=sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1 \
224 | --hash=sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe \
225 | --hash=sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99 \
226 | --hash=sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a \
227 | --hash=sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a \
228 | --hash=sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52 \
229 | --hash=sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c \
230 | --hash=sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad \
231 | --hash=sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1 \
232 | --hash=sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a \
233 | --hash=sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f \
234 | --hash=sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a \
235 | --hash=sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27
236 | # via pydantic
237 | pygments==2.18.0 \
238 | --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \
239 | --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a
240 | # via devtools
241 | # via ipython
242 | # via rich
243 | pyright==1.1.376 \
244 | --hash=sha256:0f2473b12c15c46b3207f0eec224c3cea2bdc07cd45dd4a037687cbbca0fbeff \
245 | --hash=sha256:bffd63b197cd0810395bb3245c06b01f95a85ddf6bfa0e5644ed69c841e954dd
246 | pytest==8.3.2 \
247 | --hash=sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5 \
248 | --hash=sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce
249 | # via pytest-django
250 | # via pytest-pretty
251 | # via pytest-scrutinize
252 | # via pytest-xdist
253 | pytest-django==4.8.0 \
254 | --hash=sha256:5d054fe011c56f3b10f978f41a8efb2e5adfc7e680ef36fb571ada1f24779d90 \
255 | --hash=sha256:ca1ddd1e0e4c227cf9e3e40a6afc6d106b3e70868fd2ac5798a22501271cd0c7
256 | pytest-pretty==1.2.0 \
257 | --hash=sha256:105a355f128e392860ad2c478ae173ff96d2f03044692f9818ff3d49205d3a60 \
258 | --hash=sha256:6f79122bf53864ae2951b6c9e94d7a06a87ef753476acd4588aeac018f062036
259 | pytest-xdist==3.6.1 \
260 | --hash=sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7 \
261 | --hash=sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d
262 | rich==13.7.1 \
263 | --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \
264 | --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432
265 | # via pytest-pretty
266 | six==1.16.0 \
267 | --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
268 | --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
269 | # via asttokens
270 | sqlparse==0.5.1 \
271 | --hash=sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4 \
272 | --hash=sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e
273 | # via django
274 | stack-data==0.6.3 \
275 | --hash=sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9 \
276 | --hash=sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695
277 | # via ipython
278 | traitlets==5.14.3 \
279 | --hash=sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7 \
280 | --hash=sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f
281 | # via ipython
282 | # via matplotlib-inline
283 | typing-extensions==4.12.2 \
284 | --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
285 | --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
286 | # via mypy
287 | # via pydantic
288 | # via pydantic-core
289 | tzdata==2024.1 ; sys_platform == 'win32' \
290 | --hash=sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd \
291 | --hash=sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252
292 | # via django
293 | wcwidth==0.2.13 \
294 | --hash=sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859 \
295 | --hash=sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5
296 | # via prompt-toolkit
297 |
--------------------------------------------------------------------------------
/requirements.lock:
--------------------------------------------------------------------------------
1 | # generated by rye
2 | # use `rye lock` or `rye sync` to update this lockfile
3 | #
4 | # last locked with the following flags:
5 | # pre: false
6 | # features: []
7 | # all-features: true
8 | # with-sources: false
9 | # generate-hashes: true
10 | # universal: true
11 |
12 | -e file:.
13 | annotated-types==0.7.0 \
14 | --hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \
15 | --hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89
16 | # via pydantic
17 | colorama==0.4.6 ; sys_platform == 'win32' \
18 | --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
19 | --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
20 | # via pytest
21 | iniconfig==2.0.0 \
22 | --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \
23 | --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374
24 | # via pytest
25 | packaging==24.1 \
26 | --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \
27 | --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124
28 | # via pytest
29 | pluggy==1.5.0 \
30 | --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \
31 | --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669
32 | # via pytest
33 | pydantic==2.8.2 \
34 | --hash=sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a \
35 | --hash=sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8
36 | # via pytest-scrutinize
37 | pydantic-core==2.20.1 \
38 | --hash=sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d \
39 | --hash=sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f \
40 | --hash=sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686 \
41 | --hash=sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482 \
42 | --hash=sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006 \
43 | --hash=sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83 \
44 | --hash=sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6 \
45 | --hash=sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88 \
46 | --hash=sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86 \
47 | --hash=sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a \
48 | --hash=sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6 \
49 | --hash=sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a \
50 | --hash=sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6 \
51 | --hash=sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6 \
52 | --hash=sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43 \
53 | --hash=sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c \
54 | --hash=sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4 \
55 | --hash=sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e \
56 | --hash=sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203 \
57 | --hash=sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd \
58 | --hash=sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1 \
59 | --hash=sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24 \
60 | --hash=sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc \
61 | --hash=sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc \
62 | --hash=sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3 \
63 | --hash=sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598 \
64 | --hash=sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98 \
65 | --hash=sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331 \
66 | --hash=sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2 \
67 | --hash=sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a \
68 | --hash=sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6 \
69 | --hash=sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688 \
70 | --hash=sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91 \
71 | --hash=sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa \
72 | --hash=sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b \
73 | --hash=sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0 \
74 | --hash=sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840 \
75 | --hash=sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c \
76 | --hash=sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd \
77 | --hash=sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3 \
78 | --hash=sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231 \
79 | --hash=sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1 \
80 | --hash=sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953 \
81 | --hash=sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250 \
82 | --hash=sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a \
83 | --hash=sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2 \
84 | --hash=sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20 \
85 | --hash=sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434 \
86 | --hash=sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab \
87 | --hash=sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703 \
88 | --hash=sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a \
89 | --hash=sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2 \
90 | --hash=sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac \
91 | --hash=sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611 \
92 | --hash=sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121 \
93 | --hash=sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e \
94 | --hash=sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b \
95 | --hash=sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09 \
96 | --hash=sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906 \
97 | --hash=sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9 \
98 | --hash=sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7 \
99 | --hash=sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b \
100 | --hash=sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987 \
101 | --hash=sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c \
102 | --hash=sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b \
103 | --hash=sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e \
104 | --hash=sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237 \
105 | --hash=sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1 \
106 | --hash=sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19 \
107 | --hash=sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b \
108 | --hash=sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad \
109 | --hash=sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0 \
110 | --hash=sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94 \
111 | --hash=sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312 \
112 | --hash=sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f \
113 | --hash=sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669 \
114 | --hash=sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1 \
115 | --hash=sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe \
116 | --hash=sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99 \
117 | --hash=sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a \
118 | --hash=sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a \
119 | --hash=sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52 \
120 | --hash=sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c \
121 | --hash=sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad \
122 | --hash=sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1 \
123 | --hash=sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a \
124 | --hash=sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f \
125 | --hash=sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a \
126 | --hash=sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27
127 | # via pydantic
128 | pytest==8.3.2 \
129 | --hash=sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5 \
130 | --hash=sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce
131 | # via pytest-scrutinize
132 | typing-extensions==4.12.2 \
133 | --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
134 | --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
135 | # via pydantic
136 | # via pydantic-core
137 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import gzip
2 | from pathlib import Path
3 | from typing import Callable
4 |
5 | import pytest
6 | from _pytest.pytester import RunResult, Pytester
7 |
8 | from pytest_scrutinize import Timing, TimingAdapter
9 |
10 | pytest_plugins = [
11 | "pytester",
12 | ]
13 |
14 |
def read_results_file(path: Path) -> list[Timing]:
    """Read a gzipped JSON-lines results file and parse each line as a Timing."""
    parsed: list[Timing] = []
    with gzip.open(path, mode="rt") as fd:
        for raw_line in fd:
            parsed.append(TimingAdapter.validate_json(raw_line))
    return parsed
18 |
19 |
@pytest.fixture(params=[True, False], ids=["xdist", "normal"])
def with_xdist(request) -> bool:
    """Parametrize dependent tests to run both with and without pytest-xdist."""
    return request.param
23 |
24 |
@pytest.fixture()
def output_file(tmp_path) -> Path:
    """Per-test path for the gzipped JSONL file written by --scrutinize."""
    return tmp_path / "output.jsonl.gz"
28 |
29 |
@pytest.fixture()
def run_tests(
    pytester_pretty: Pytester, with_xdist, output_file
) -> Callable[..., tuple[RunResult, list[Timing]]]:
    """Return a helper that copies an example file, runs pytest on it with
    --scrutinize enabled, and returns the run result plus parsed timings."""
    extra_flags: list[str] = ["-n 2"] if with_xdist else []

    def _run(test_name: str, *args: str):
        pytester_pretty.copy_example(test_name)
        run_result: RunResult = pytester_pretty.runpytest(
            "--scrutinize", output_file, *extra_flags, *args
        )
        return run_result, read_results_file(output_file)

    return _run
47 |
--------------------------------------------------------------------------------
/tests/django_app/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/orf/pytest-scrutinize/a9727cbfeabf69f07c2c8e383e7c3103679877fb/tests/django_app/__init__.py
--------------------------------------------------------------------------------
/tests/django_app/apps.py:
--------------------------------------------------------------------------------
1 | from django.apps import AppConfig
2 |
3 |
class DjangoAppConfig(AppConfig):
    """App config for the Django application used only by the example tests."""
    name = "tests.django_app"
6 |
--------------------------------------------------------------------------------
/tests/django_app/models.py:
--------------------------------------------------------------------------------
1 | from django.db import models
2 |
3 |
class DummyModel(models.Model):
    """Minimal model used to generate SQL queries in the example tests."""
    foo = models.TextField()
6 |
--------------------------------------------------------------------------------
/tests/django_app/settings.py:
--------------------------------------------------------------------------------
# Minimal Django settings used only by the example/pytester test runs.
INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "tests.django_app.apps.DjangoAppConfig",
]

# In-memory SQLite keeps the example runs fast and self-contained.
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",
        "NAME": ":memory:",
    }
}
17 |
--------------------------------------------------------------------------------
/tests/examples/test_django.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from tests.django_app.models import DummyModel
3 |
4 |
@pytest.fixture()
def teardown_fixture():
    """Create one row during setup and another during teardown."""
    DummyModel.objects.create(foo="foobar")
    yield
    DummyModel.objects.create(foo="foobar")
10 |
11 |
@pytest.fixture()
def fixture():
    """Create one row during setup; no teardown phase."""
    DummyModel.objects.create(foo="foobar")
15 |
16 |
@pytest.mark.django_db
def test_case(teardown_fixture, fixture):
    """Issue a handful of ORM queries so SQL timings get recorded."""
    obj = DummyModel.objects.create(foo="foobar")
    # Two rows from the fixtures' setup phases plus the one above.
    assert DummyModel.objects.count() == 3
    assert len(list(DummyModel.objects.all())) == 3
    obj.delete()
23 |
--------------------------------------------------------------------------------
/tests/examples/test_mock.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from urllib import parse
3 |
4 | url = "https://google.com/foobar"
5 |
6 |
def call_mocked_function() -> int:
    """Exercise three distinct urllib.parse functions that the plugin may mock."""
    parsed = parse.urlparse(url)
    assert parsed.path == "/foobar"
    assert parse.quote("foo") == "foo"
    assert parse.parse_qs("foo=bar") == {"foo": ["bar"]}
    return 123
12 |
13 |
@pytest.fixture()
def indirect_fixture():
    """Fixture resolved via request.getfixturevalue(), not as a parameter."""
    call_mocked_function()
17 |
18 |
@pytest.fixture()
def teardown_fixture():
    """Call the mocked functions during both setup and teardown."""
    call_mocked_function()
    yield
    call_mocked_function()
24 |
25 |
@pytest.fixture()
def fixture(request: pytest.FixtureRequest):
    """Call the mocked functions after indirectly requesting another fixture."""
    request.getfixturevalue("indirect_fixture")
    call_mocked_function()
    return
31 |
32 |
def test_case(teardown_fixture, fixture):
    """Call the mocked functions from the test body itself."""
    call_mocked_function()
    assert True
36 |
--------------------------------------------------------------------------------
/tests/examples/test_simple.py:
--------------------------------------------------------------------------------
1 | import gc
2 |
3 | import pytest
4 |
5 |
@pytest.fixture()
def teardown_fixture():
    """Fixture with an (empty) teardown phase."""
    yield
9 |
10 |
@pytest.fixture()
def fixture():
    """Fixture with no teardown phase."""
    return
14 |
15 |
16 | def test_case(teardown_fixture, fixture):
17 | # Force a collection, for --scrutinize-gc tests
18 | gc.collect()
19 | assert True
20 |
--------------------------------------------------------------------------------
/tests/test_plugin.py:
--------------------------------------------------------------------------------
1 | import collections
2 | import typing
3 | from typing import Type, Hashable
4 |
5 | import pytest
6 | from _pytest.pytester import RunResult
7 |
8 | from pytest_scrutinize import (
9 | Timing,
10 | CollectionTiming,
11 | WorkerTiming,
12 | TestTiming as PyTestTiming,
13 | FixtureTiming,
14 | MockTiming,
15 | DjangoSQLTiming,
16 | GCTiming,
17 | )
18 | from pytest_scrutinize.timer import Duration
19 |
20 | T = typing.TypeVar("T", bound=Timing)
21 |
22 |
def get_timing_items(results: list[Timing], cls: Type[T]) -> list[T]:
    """Filter *results* down to the timings that are instances of *cls*."""
    matching: list[T] = []
    for timing in results:
        if isinstance(timing, cls):
            matching.append(timing)
    return matching
25 |
26 |
def assert_duration(duration: Duration | None):
    """Assert a duration was recorded and its unit conversions agree."""
    assert duration is not None
    nanos = duration.as_nanoseconds
    assert nanos != 0
    assert duration.as_microseconds == nanos // 1_000
31 |
32 |
H = typing.TypeVar("H", bound=Hashable)


def assert_unique(items: typing.Iterable[H]) -> set[H]:
    """Assert *items* contains no duplicates; return them as a set."""
    materialized = list(items)
    distinct = set(materialized)
    assert len(distinct) == len(materialized)
    return distinct
41 |
42 |
def assert_not_master(results: list[T]):
    """Assert none of the timings were recorded on the xdist master node."""
    workers = [timing.meta.worker for timing in results]
    assert all(worker != "master" for worker in workers)
46 |
47 |
def assert_results_collection(results: list[Timing]):
    """Collection timings: present, one per worker, master included."""
    collection_events = get_timing_items(results, CollectionTiming)
    assert collection_events != []
    workers = assert_unique(event.meta.worker for event in collection_events)
    assert "master" in workers

    for event in collection_events:
        assert_duration(event.runtime)
56 |
57 |
def assert_xdist_workers_ready(results: list[Timing]):
    """Each xdist worker reported a unique ready/runtime timing; none is master."""
    ready_events = get_timing_items(results, WorkerTiming)
    assert ready_events != []
    assert_unique(event.meta.worker for event in ready_events)
    assert_not_master(ready_events)

    for event in ready_events:
        assert_duration(event.ready)
        assert_duration(event.runtime)
66 |
67 |
def assert_tests(results: list[Timing], is_xdist: bool):
    """Per-test timings: present, unique IDs, and valid runtimes."""
    timings = get_timing_items(results, PyTestTiming)
    assert timings != []
    assert_unique(timing.test_id for timing in timings)

    if is_xdist:
        assert_not_master(timings)

    for timing in timings:
        assert_duration(timing.runtime)
78 |
79 |
def assert_fixture(fixture: FixtureTiming, root_name: str):
    """Check one fixture timing; only teardown_* fixtures have teardown times."""
    assert_duration(fixture.setup)

    prefix = f"{root_name}."
    if not fixture.name.startswith(prefix):
        # Fixture from outside the example module - nothing more to check.
        return

    if fixture.name.startswith(f"{prefix}teardown_"):
        assert_duration(fixture.teardown)
    else:
        assert fixture.teardown is None
90 |
91 |
def assert_fixtures(results: list[Timing], is_xdist: bool, root_name: str):
    """Fixture timings: present, and each referenced test ID actually exists."""
    fixture_timings = get_timing_items(results, FixtureTiming)
    assert fixture_timings != []
    if is_xdist:
        assert_not_master(fixture_timings)

    known_test_ids = {
        test.test_id for test in get_timing_items(results, PyTestTiming)
    }

    for fixture_timing in fixture_timings:
        if fixture_timing.test_id is not None:
            assert fixture_timing.test_id in known_test_ids
        assert_fixture(fixture_timing, root_name)
106 |
107 |
def assert_mocks(results: list[Timing], is_xdist: bool, root_name: str):
    """Mock timings: expected mocked names, linked to known tests and fixtures."""
    fixture_timings = get_timing_items(results, FixtureTiming)
    mock_timings = get_timing_items(results, MockTiming)
    assert mock_timings != []
    if is_xdist:
        assert_not_master(mock_timings)

    assert {mock.name for mock in mock_timings} == {
        "urllib.parse.parse_qs",
        "urllib.parse.quote",
        "urllib.parse.urlparse",
    }

    assert {
        mock.test_id.split("::")[1] for mock in mock_timings if mock.test_id
    } == {"test_case"}

    known_fixture_names = {fixture.name for fixture in fixture_timings}

    # All fixtures should have called the mock, excluding the indirect fixture
    expected_fixtures = {
        f"{root_name}.fixture",
        f"{root_name}.teardown_fixture",
    }
    fixtures_calling_mock = {
        mock.fixture_name
        for mock in mock_timings
        if mock.fixture_name is not None
    }
    assert fixtures_calling_mock == expected_fixtures

    for mock in mock_timings:
        assert_duration(mock.runtime)
        # Every mock call must be attributed to a fixture and/or a test.
        assert mock.fixture_name or mock.test_id
        if mock.test_id:
            assert mock.test_id == f"{root_name}.py::test_case"
        # Check that the fixture name is in the map
        if mock.fixture_name is not None:
            assert mock.fixture_name in known_fixture_names
148 |
149 |
def assert_suite(result: RunResult, timings: list[Timing], with_xdist: bool):
    """Shared assertions that apply to every example suite run."""
    result.assert_outcomes(passed=1)

    assert_results_collection(timings)
    assert_tests(timings, is_xdist=with_xdist)
    assert_fixtures(timings, is_xdist=with_xdist, root_name="test_suite")

    if with_xdist:
        assert_xdist_workers_ready(timings)
159 |
160 |
def test_simple(run_tests, output_file, with_xdist):
    """A plain suite produces collection, test and fixture timings."""
    run_result, parsed_timings = run_tests("test_simple.py")
    assert_suite(run_result, parsed_timings, with_xdist)
164 |
165 |
def test_mocks(run_tests, output_file, with_xdist):
    """Functions named via --scrutinize-func are mocked and timed."""
    run_result, parsed_timings = run_tests(
        "test_mock.py",
        "--scrutinize-func=urllib.parse.urlparse,urllib.parse.parse_qs",
        "--scrutinize-func=urllib.parse.quote",
    )
    assert_suite(run_result, parsed_timings, with_xdist)
    assert_mocks(parsed_timings, with_xdist, root_name="test_mock")
174 |
175 |
def test_gc(run_tests, output_file, with_xdist):
    """GC timings are captured when --scrutinize-gc is passed."""
    run_result, parsed_timings = run_tests("test_simple.py", "--scrutinize-gc")
    assert_suite(run_result, parsed_timings, with_xdist)

    gc_events = get_timing_items(parsed_timings, GCTiming)
    assert gc_events != []
    for gc_event in gc_events:
        assert_duration(gc_event.runtime)

    if with_xdist:
        workers_seen = {timing.meta.worker for timing in parsed_timings}
        assert workers_seen != {"master"}
187 |
188 |
@pytest.mark.parametrize("with_query", [True, False])
def test_django(run_tests, output_file, with_xdist, with_query):
    """Django SQL timings are captured, optionally including raw query text."""
    flag = "--scrutinize-django-sql"
    if with_query:
        flag += "=query"
    run_result, parsed_timings = run_tests(
        "test_django.py", "--ds=tests.django_app.settings", flag
    )
    assert_suite(run_result, parsed_timings, with_xdist)
    sql_timings = get_timing_items(parsed_timings, DjangoSQLTiming)
    assert sql_timings != []

    timings_by_fixture: dict[str, list[DjangoSQLTiming]] = collections.defaultdict(list)
    timings_by_test: dict[str, list[DjangoSQLTiming]] = collections.defaultdict(list)

    for sql_timing in sql_timings:
        assert_duration(sql_timing.runtime)
        if sql_timing.fixture_name:
            timings_by_fixture[sql_timing.fixture_name].append(sql_timing)
        elif sql_timing.test_id:
            # Only test queries, no fixtures at all
            timings_by_test[sql_timing.test_id].append(sql_timing)

    # All of our SQL hashes (and queries) should be the same
    fixture_names = ("test_django.teardown_fixture", "test_django.fixture")
    sql_hashes = {
        (timing.sql_hash, timing.sql)
        for name in fixture_names
        for timing in timings_by_fixture[name]
    }
    assert len(sql_hashes) == 1

    assert_unique(
        (timing.sql_hash, timing.sql)
        for timing in timings_by_test["test_django.py::test_case"]
    )
224 |
225 |
def test_all(run_tests, output_file, with_xdist):
    """All collection options can be enabled together without interfering."""
    run_result, parsed_timings = run_tests(
        "test_simple.py",
        "--scrutinize-django-sql",
        "--scrutinize-gc",
        "--scrutinize-func=urllib.parse.urlparse",
    )
    assert_suite(run_result, parsed_timings, with_xdist)
234 |
--------------------------------------------------------------------------------