├── .git-blame-ignore-revs ├── .gitattributes ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ ├── config.yml │ └── feature_request.yml ├── dependabot.yml ├── pull_request_template.md ├── release.yml └── workflows │ ├── approve.yml │ ├── docs.yml │ ├── release.yml │ └── test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CONTRIBUTING.md ├── LICENSE ├── Makefile ├── README.md ├── docs ├── css │ └── docs.css ├── custom.md ├── depth.md ├── filtering.md ├── fragments.md ├── index.md ├── quickstart.md ├── relay.md ├── settings.md └── technical.md ├── example_project ├── __init__.py ├── app │ ├── __init__.py │ ├── admin.py │ ├── apps.py │ ├── management │ │ ├── __init__.py │ │ └── commands │ │ │ ├── __init__.py │ │ │ └── create_test_data.py │ ├── migrations │ │ ├── 0001_initial.py │ │ └── __init__.py │ ├── models.py │ ├── schema.py │ ├── types.py │ └── utils.py └── config │ ├── __init__.py │ ├── logging.py │ ├── settings.py │ ├── urls.py │ └── wsgi.py ├── manage.py ├── mkdocs.yml ├── poetry.lock ├── pyproject.toml ├── query_optimizer ├── __init__.py ├── ast.py ├── compiler.py ├── converters.py ├── errors.py ├── fields.py ├── filter.py ├── filter_info.py ├── optimizer.py ├── prefetch_hack.py ├── py.typed ├── selections.py ├── settings.py ├── types.py ├── typing.py ├── utils.py └── validators.py └── tests ├── __init__.py ├── conftest.py ├── factories ├── __init__.py ├── _base.py ├── apartment.py ├── building.py ├── developer.py ├── employee.py ├── example.py ├── housing_company.py ├── owner.py ├── ownership.py ├── postal_code.py ├── property_manager.py ├── real_estate.py ├── sale.py ├── shareholder.py └── tag.py ├── helpers.py ├── test_all_relation_types.py ├── test_basic_relations.py ├── test_fields.py ├── test_filtering.py ├── test_fragments.py ├── test_misc.py ├── test_pagination.py ├── test_relay_connection.py ├── test_relay_node.py ├── test_selections.py ├── test_utils.py └── test_validators.py /.git-blame-ignore-revs: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/MrThearMan/graphene-django-query-optimizer/f8291cd21c215f7a84ad79836c6e40a651d15ee2/.git-blame-ignore-revs -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Default behavior 2 | # ============ 3 | * text=auto 4 | 5 | # Source files 6 | # ============ 7 | *manage.py text diff=python eol=lf 8 | *.py text diff=python 9 | *.pxd text diff=python 10 | *.py3 text diff=python 11 | *.pyw text diff=python 12 | *.pyx text diff=python 13 | *.pyz text diff=python 14 | *.pyi text diff=python 15 | 16 | # Plain text 17 | # ============ 18 | *.js text diff=js 19 | *.css text diff=css 20 | *.html text diff=html 21 | *.md text diff=markdown 22 | *.json text diff=json eol=lf 23 | *.toml text diff=toml eol=lf 24 | *.yml text diff=yaml eol=lf 25 | *.yaml text diff=yaml eol=lf 26 | 27 | # Archives 28 | # ============ 29 | *.7z filter=lfs diff=lfs merge=lfs -text 30 | *.br filter=lfs diff=lfs merge=lfs -text 31 | *.gz filter=lfs diff=lfs merge=lfs -text 32 | *.tar filter=lfs diff=lfs merge=lfs -text 33 | *.zip filter=lfs diff=lfs merge=lfs -text 34 | 35 | # Binary files 36 | # ============ 37 | *.db binary 38 | *.p binary 39 | *.pkl binary 40 | *.pickle binary 41 | *.pyc binary export-ignore 42 | *.pyo binary export-ignore 43 | *.pyd binary 44 | *.png binary 45 | *.jpg binary 46 | 47 | # Unix specific 48 | # ============ 49 | *.sh text eol=lf 50 | *.bash text eol=lf 51 | *.fish text eol=lf 52 | 53 | # Windows specific 54 | # ============ 55 | *.cmd text eol=crlf 56 | *.bat text eol=crlf 57 | *.ps1 text eol=crlf 58 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Bug Report 2 | 
description: File a bug report. 3 | labels: 4 | - bug 5 | body: 6 | - type: markdown 7 | id: thank-you 8 | attributes: 9 | value: | 10 | Thank you for your interest in the library! 11 | - type: checkboxes 12 | id: docs-read 13 | attributes: 14 | label: I have read the docs thoroughly before making this bug report. 15 | description: >- 16 | Docs can be found [here](https://mrthearman.github.io/graphene-django-query-optimizer/). 17 | options: 18 | - label: "Yes" 19 | required: true 20 | - type: checkboxes 21 | id: other-issues 22 | attributes: 23 | label: I have read through other open issues, and my issue is not a duplicate. 24 | description: >- 25 | Open issues can be found [here](https://github.com/MrThearMan/graphene-django-query-optimizer/issues). 26 | options: 27 | - label: "Yes" 28 | required: true 29 | - type: dropdown 30 | id: version 31 | attributes: 32 | label: What version of the library you are using? 33 | description: >- 34 | Please note that library versions not available below are not supported. 35 | If you are not using the latest version, please try to also reproduce the bug 36 | on the latest version before opening the issue. 37 | options: 38 | - "0.10.11" 39 | - "0.10.10" 40 | - "0.10.9" 41 | - "0.10.8" 42 | - "0.10.7" 43 | - "0.10.6" 44 | - "0.10.5" 45 | - "0.10.4" 46 | - "0.10.3" 47 | - "0.10.2" 48 | - "0.10.1" 49 | - "0.10.0" 50 | - "0.9.0" 51 | - "0.8.5" 52 | validations: 53 | required: true 54 | - type: dropdown 55 | id: python 56 | attributes: 57 | label: Which python version are you using? 58 | description: >- 59 | Please note that python versions not available below are not supported. 60 | options: 61 | - "3.13" 62 | - "3.12" 63 | - "3.11" 64 | - "3.10" 65 | - "3.9" 66 | validations: 67 | required: true 68 | - type: dropdown 69 | id: os 70 | attributes: 71 | label: What operating system are you on? 72 | description: >- 73 | Please note operating systems not available below may not be supported. 
74 | Only the latest versions of these operating systems are tested. 75 | options: 76 | - "Windows" 77 | - "Mac" 78 | - "Ubuntu" 79 | validations: 80 | required: true 81 | - type: textarea 82 | id: bug-description 83 | attributes: 84 | label: Description 85 | description: >- 86 | Give detailed steps on how to reproduce the bug from a new virtual environment. 87 | Error tracebacks are appreciated (unabridged when possible). 88 | Do not use pictures to include the error, add [code blocks](https://www.markdownguide.org/extended-syntax/#fenced-code-blocks) instead. 89 | validations: 90 | required: true 91 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: Question or Problem 4 | about: Ask in GitHub Discussions. 5 | url: https://github.com/MrThearMan/graphene-django-query-optimizer/discussions 6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: Feature request 2 | description: Make a feature request. 3 | labels: 4 | - enhancement 5 | body: 6 | - type: markdown 7 | id: thank-you 8 | attributes: 9 | value: | 10 | Thank you for your interest in the library! 11 | - type: checkboxes 12 | id: docs-read 13 | attributes: 14 | label: I have read the docs thoroughly before making this feature request. 15 | description: >- 16 | Docs can be found [here](https://mrthearman.github.io/graphene-django-query-optimizer/). 17 | options: 18 | - label: "Yes" 19 | required: true 20 | - type: checkboxes 21 | id: other-issues 22 | attributes: 23 | label: I have read through other open issues, and my issue is not a duplicate. 
24 | description: >- 25 | Open issues can be found [here](https://github.com/MrThearMan/graphene-django-query-optimizer/issues). 26 | options: 27 | - label: "Yes" 28 | required: true 29 | - type: textarea 30 | id: feature-description 31 | attributes: 32 | label: Description 33 | description: >- 34 | Please describe the feature and how you want it to work. 35 | validations: 36 | required: true 37 | - type: textarea 38 | id: motivation 39 | attributes: 40 | label: Motivation 41 | description: >- 42 | What are you trying to do? 43 | How would this feature help you? 44 | validations: 45 | required: true 46 | - type: dropdown 47 | id: contributor 48 | attributes: 49 | label: Would you like to solve this issue yourself with a pull request? 50 | description: >- 51 | If yes, please read the contributing instructions 52 | [here](https://github.com/MrThearMan/graphene-django-query-optimizer/blob/main/CONTRIBUTING.md). 53 | options: 54 | - "Yes" 55 | - "No" 56 | validations: 57 | required: true 58 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # Setup dependabot updates 2 | 3 | version: 2 4 | 5 | updates: 6 | 7 | # Update GitHub Actions 8 | - package-ecosystem: "github-actions" 9 | directory: "/" 10 | schedule: 11 | interval: "weekly" 12 | pull-request-branch-name: 13 | separator: "-" 14 | groups: 15 | github-actions-dependencies: 16 | patterns: 17 | - "*" 18 | 19 | # Update Poetry dependencies 20 | - package-ecosystem: "pip" # yes, this is correct 21 | directory: "/" 22 | schedule: 23 | interval: "weekly" 24 | pull-request-branch-name: 25 | separator: "-" 26 | groups: 27 | python-dependencies: 28 | patterns: 29 | - "*" 30 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | [//]: # 
"Please read `CONTRIBUTING.md` before opening a pull request." 2 | 3 | # Description 4 | [//]: # "Describe the changes in this pull request here." 5 | 6 | ... 7 | 8 | --- 9 | 10 | [//]: # "Use '#123' to refer to issue number 123." 11 | Closes Issue: # 12 | 13 | --- 14 | -------------------------------------------------------------------------------- /.github/release.yml: -------------------------------------------------------------------------------- 1 | 2 | changelog: 3 | categories: 4 | - title: Features 5 | labels: 6 | - "*" 7 | exclude: 8 | labels: 9 | - bug 10 | - maintenance 11 | - dependencies 12 | - title: Fixes 13 | labels: 14 | - bug 15 | - title: Maintenance 16 | labels: 17 | - maintenance 18 | - title: Automated 19 | labels: 20 | - dependencies 21 | -------------------------------------------------------------------------------- /.github/workflows/approve.yml: -------------------------------------------------------------------------------- 1 | name: Auto approve PRs 2 | 3 | on: 4 | pull_request_target: 5 | 6 | jobs: 7 | approve: 8 | permissions: 9 | pull-requests: write 10 | contents: write 11 | uses: MrThearMan/CI/.github/workflows/approve.yml@v0.4.15 12 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: Docs 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - "docs/**" 9 | - "mkdocs.yml" 10 | - ".github/workflows/docs.yml" 11 | workflow_dispatch: 12 | 13 | jobs: 14 | docs: 15 | uses: MrThearMan/CI/.github/workflows/docs.yml@v0.4.15 16 | with: 17 | poetry-version: "2.0.0" 18 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | release: 5 | types: 6 | - released 7 | 8 | jobs: 9 | release: 10 | uses: 
MrThearMan/CI/.github/workflows/release.yml@v0.4.15 11 | secrets: 12 | pypi-token: ${{ secrets.PYPI_API_TOKEN }} 13 | with: 14 | poetry-version: "2.0.0" 15 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - "**.py" 9 | - "pyproject.toml" 10 | - "poetry.lock" 11 | - ".github/workflows/test.yml" 12 | pull_request: 13 | workflow_dispatch: 14 | 15 | jobs: 16 | test: 17 | uses: MrThearMan/CI/.github/workflows/test.yml@v0.4.15 18 | with: 19 | python-version: '["3.10", "3.11", "3.12", "3.13"]' 20 | poetry-version: "2.0.0" 21 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.idea 2 | *.tox 3 | *.ruff_cache 4 | *.mypy_cache 5 | *.pytest_cache 6 | *__pycache__ 7 | *.coverage 8 | example_project/config/testdb 9 | pytest.ini 10 | profile.svg 11 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | ci: 2 | skip: [ 3 | "poetry-lock", 4 | ] 5 | 6 | repos: 7 | 8 | - repo: https://github.com/pre-commit/pre-commit-hooks 9 | rev: v5.0.0 10 | hooks: 11 | - id: check-toml 12 | - id: check-yaml 13 | args: [ 14 | "--unsafe", 15 | ] 16 | - id: check-json 17 | - id: trailing-whitespace 18 | args: [ 19 | "--markdown-linebreak-ext=md" 20 | ] 21 | 22 | - repo: https://github.com/astral-sh/ruff-pre-commit 23 | rev: v0.11.12 24 | hooks: 25 | - id: ruff 26 | - id: ruff-format 27 | 28 | - repo: https://github.com/python-poetry/poetry 29 | rev: 2.1.3 30 | hooks: 31 | - id: poetry-check 32 | - id: poetry-lock 33 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: 
-------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Thank you for your interest in contributing! 4 | 5 | To start, please read the library [docs] thoroughly. 6 | If you don't find what you are looking for, proceed with the steps below. 7 | 8 | ## I found a bug! 9 | 10 | Please file a [bug report]. If you are not using the latest version of the library, 11 | please upgrade and see if that fixes the issue. If not, please create a minimal example 12 | that demonstrates the bug, and instructions on how to create that setup from a new virtual 13 | environment. Also include any error tracebacks (unabridged when possible). This will help 14 | a lot when diagnosing the bug. Do not use pictures to include the traceback. 15 | 16 | ## I have a feature request! 17 | 18 | You can suggest new features to be implemented via a [feature request]. 19 | You can ask me to implement it, or work on it yourself, but all features should 20 | be discussed and agreed upon first before any coding is done. 21 | 22 | ## I have a question! 23 | 24 | Please ask it in the [discussions section] instead of creating an issue. 25 | If your question warrants an issue, I'll ask you to create it. 26 | Questions about clarifying documentation are appreciated! 27 | 28 | ## Creating a pull request 29 | 30 | Once you have created a [feature request], we have agreed on an implementation, 31 | and you wish to work on it, follow these steps to create a pull request. 32 | 33 | 1. [Fork the repository][fork]. 34 | 2. Clone your fork and create a new branch from the `main` branch. 35 | 3. [Set up the environment][setup]. 36 | 4. Make changes and write tests following [these guidelines][code-guidelines]. 37 | 5. Add documentation when applicable following [these guidelines][docs-guidelines]. 38 | 6. Push the changes to your fork. 39 | 7. Create a [pull request] targeting the main branch. 40 | 8. Sit back while your pull request is [reviewed]. 
41 | 42 | Note that a pull request should always be aimed at solving a single issue. 43 | If you want multiple issues solved, make separate pull requests for each. 44 | Spelling mistakes are the exception, they are always welcome! 45 | 46 | Pull requests should be kept as small as possible while following the guidelines 47 | mentioned above. Smaller pull requests are easier to review and test, which helps 48 | them get merged. 49 | 50 | ## Code review process 51 | 52 | Pull requests will be reviewed automatically and manually. 53 | 54 | In the automated phase, [GitHub Actions] will run testing pipelines for all supported 55 | operating systems and python versions, and [pre-commit CI] will check linting rules. 56 | If you encounter any errors, try to fix them based on what the pipelines tell you. 57 | If coverage is lowered, add tests, noting the guidelines [here][code-guidelines]. 58 | Don't be afraid to ask for advice if you're unsure what is wrong. 59 | 60 | > Note for first-time contributors: Checks are not allowed to run automatically for 61 | > first-time contributors, so you'll need me to approve them each time you push new code. 62 | 63 | > Known issues: GitHub Actions might fail unexpectedly when installing dependencies. 64 | > If this happens, the failed jobs need to be run again a few times to get past this. 65 | 66 | In the manual phase, I will review the pull request by adding comments with suggestions 67 | for changes. If you agree with the suggestions, implement them, and push the changes to 68 | your fork; the pull request will be updated automatically. You can either amend your previous 69 | commits or add more commits, either is fine. If you disagree with the suggestions, provide 70 | your reasons for disagreeing, and we can discuss what to do. 71 | 72 | Once all automated checks have passed, and I have accepted the pull request, your code will be 73 | merged to the `main` branch. Any related issues should be closed as completed. 
74 | I'll usually make a [new release] after each new feature, but if not, you can also ask for one. 75 | 76 | ## Creating a new release 77 | 78 | 1. Increment the version in `pyproject.toml` according to [semantic versioning] rules. 79 | 2. Push the change to the `main` branch with the commit message `Bump version`. 80 | 3. [Draft a new release] on GitHub. 81 | - Use `v{version}` (e.g. v1.2.3) for the tag name, and `Release {version}` for 82 | the release title, using the same version that's in `pyproject.toml`. 83 | - Fill in the release description. 84 | - Add any attachments when applicable. 85 | 4. Publish the release. This will start the `release` pipeline on [GitHub Actions]. 86 | 5. Check that the release pipeline was successful. If not, delete the tag from origin 87 | with `git push --delete origin {tag_name}` and fix the issue before trying again. 88 | 89 | > Note, that the release will be made with the `pyproject.toml` version and not the 90 | > `tag` version, and that this is not checked anywhere, so make sure they match! 91 | 92 | ## Setting up the environment 93 | 94 | 1. Install [Poetry]. 95 | 2. Install [Make]. 96 | - Windows: Install [Chocolatey] and then `choco install make`. 97 | - Mac: Install [Homebrew] and then `brew install make`. 98 | - Ubuntu: `apt install make`. 99 | 3. Run `poetry install` to create a virtual environment and install project dependencies. 100 | 4. Run `make hook` to install the [pre-commit] hooks. 101 | 102 | Run `make help` to list all existing development commands and their descriptions. 103 | 104 | ## Testing 105 | 106 | Tests can be run with `make tests`, and individual tests with `make test `. 107 | This will run tests in your [local environment][setup]. 108 | 109 | You can also test your code in multiple environments with [tox]. To do this, you must 110 | install python interpreters for all python versions the library supports, then run 111 | `make tox`. 
112 | 113 | Linting can be run on-demand with `make lint`, or automatically before commits 114 | when installed with `make hook`. 115 | 116 | ## Guidelines for writing code 117 | 118 | - All code should be tested with 100% coverage 119 | - Do not write tests simply to achieve 100% coverage. Instead, write tests for all the ways the 120 | feature could be used (use cases), including ways that should not work, and then test for coverage. 121 | If you find uncovered code, see if you can remove it, or maybe you simply missed a use case. 122 | You should always need more tests to cover all the use cases than to achieve 100% coverage. 123 | - Comments that ignore test coverage (`# pragma: no cover`) should be used _**very**_ sparingly. 124 | They are often not necessary and can lead to undocumented behavior if you are not careful. 125 | 126 | - All code should be typed when possible. 127 | - Tests are an exception to this; typing them is optional. 128 | - Make sure the typing methods used are supported in all python versions 129 | the library supports (e.g., use `List[int]` instead of `list[int]` for Python 3.8 support). 130 | CI will yell at you if you don't. 131 | - Create all custom types in `query_optimizer/typing.py` and import them from there. 132 | This avoids circular imports. 133 | - Use of `TypedDict` is encouraged where dicts would be used. 134 | - Also import common types like `List` from `query_optimizer/typing.py` instead of the built-in `typing` module. 135 | This is to make importing types more consistent across the codebase, and allows conditional import 136 | logic with the `typing_extensions` module for newer typing methods like `ParamSpec` to be contained 137 | in a single place. 138 | - Using `mypy` for static type checking is optional, and will likely lead to many "errors" detected. 139 | 140 | - All functions, methods, and classes should include a docstring (*) in [reStructuredText format][pep287]. 
141 | - (*) Code that is short and _clearly_ self-documenting does not necessarily need a docstring. 142 | As a dumb example, `def sum(i: int, j: int) -> int: return i + j` does not need a docstring. 143 | This applies more broadly to arguments, e.g., when a function might need a docstring, the arguments 144 | might not need explicit documentation. 145 | - Keep the docstring to the point. Each line of documentation has a maintenance cost. 146 | Documentation is not an excuse to write code that is hard to understand. 147 | Docstrings should not include code examples, they belong to [docs]. 148 | 149 | - All code should be linted using the provided [pre-commit] hooks. 150 | - Easiest way to do this is to install the pre-commit hooks with `make hook`. This will make 151 | sure the pre-commit hooks will run automatically when you make a commit. 152 | - Comments that ignore linting rules (`# type: ignore`, `# fmt: off`, `# noqa`) should be used 153 | _**very**_ sparingly. They are often not necessary and can lead to undocumented behavior 154 | if you are not careful. 155 | 156 | ## Guidelines for writing documentation 157 | 158 | - All documentation is written in `docs/` using markdown, and built with [mkdocs]. 159 | - Write in idiomatic english, using simple language. 160 | - Keep examples simple and self-contained. Don't try to list all possible scenarios at once. 161 | Give the reader time to understand the basics before going over edge cases. 162 | - Use markdown features, like [fenced code blocks][code block], [blockquotes], [horizontal rules], 163 | or [links], to emphasize and format text. 164 | - If diagrams are needed, use [mermaid.js] inside a [fenced code block][code block]. 165 | - Break up lines around the 100 characters mark. This improves readability on wider monitors 166 | without adjusting the window size (and when not using text-wrapping). 167 | - Do not use emojis. 168 | - Double-check for spelling mistakes and grammar. 
169 | 170 | ## License 171 | 172 | By contributing, you agree that your contributions will be licensed under the [MIT Licence]. 173 | 174 | 175 | [docs]: https://mrthearman.github.io/graphene-django-query-optimizer/ 176 | [Issue]: https://github.com/MrThearMan/graphene-django-query-optimizer/issues/new/choose 177 | [bug report]: https://github.com/MrThearMan/graphene-django-query-optimizer/issues/new?template=bug_report.yml 178 | [feature request]: https://github.com/MrThearMan/graphene-django-query-optimizer/issues/new?template=feature_request.yml 179 | [discussions section]: https://github.com/MrThearMan/graphene-django-query-optimizer/discussions 180 | [pull request]: https://github.com/MrThearMan/graphene-django-query-optimizer/compare 181 | [fork]: https://github.com/MrThearMan/graphene-django-query-optimizer/fork 182 | [setup]: https://github.com/MrThearMan/graphene-django-query-optimizer/blob/main/CONTRIBUTING.md#setting-up-the-environment 183 | [tox]: https://tox.wiki/ 184 | [code-guidelines]: https://github.com/MrThearMan/graphene-django-query-optimizer/blob/main/CONTRIBUTING.md#guidelines-for-writing-code 185 | [docs-guidelines]: https://github.com/MrThearMan/graphene-django-query-optimizer/blob/main/CONTRIBUTING.md#guidelines-for-writing-documentation 186 | [reviewed]: https://github.com/MrThearMan/graphene-django-query-optimizer/blob/main/CONTRIBUTING.md#code-review-process 187 | [Github Actions]: https://github.com/features/actions 188 | [pre-commit ci]: https://pre-commit.ci/ 189 | [new release]: https://github.com/MrThearMan/graphene-django-query-optimizer/blob/main/CONTRIBUTING.md#creating-a-new-release 190 | [semantic versioning]: https://semver.org/ 191 | [Draft a new release]: https://github.com/MrThearMan/graphene-django-query-optimizer/releases/new 192 | [poetry]: https://python-poetry.org/docs/#installation 193 | [make]: https://man7.org/linux/man-pages/man1/make.1.html 194 | [chocolatey]: https://chocolatey.org/install 195 | [homebrew]: 
https://docs.brew.sh/Installation 196 | [pre-commit]: https://pre-commit.com/ 197 | [pep287]: https://peps.python.org/pep-0287/ 198 | [mkdocs]: https://www.mkdocs.org/ 199 | [mermaid.js]: https://mermaid.js.org/ 200 | [code block]: https://www.mkdocs.org/user-guide/writing-your-docs/#fenced-code-blocks 201 | [blockquotes]: https://www.markdownguide.org/basic-syntax#blockquotes-1 202 | [horizontal rules]: https://www.markdownguide.org/basic-syntax#horizontal-rules 203 | [links]: https://www.markdownguide.org/basic-syntax#links 204 | [MIT Licence]: http://choosealicense.com/licenses/mit/ 205 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Matti Lamppu 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | export DJANGO_SETTINGS_MODULE = example_project.config.settings 2 | 3 | .PHONY: Makefile 4 | .PHONY: create-user 5 | .PHONY: dev 6 | .PHONY: docs 7 | .PHONY: flush 8 | .PHONY: generate 9 | .PHONY: help 10 | .PHONY: hook 11 | .PHONY: lint 12 | .PHONY: migrate 13 | .PHONY: migrations 14 | .PHONY: profile 15 | .PHONY: setup 16 | .PHONY: test 17 | .PHONY: tests 18 | .PHONY: tox 19 | 20 | # Trick to allow passing commands to make 21 | # Use quotes (" ") if command contains flags (-h / --help) 22 | args = `arg="$(filter-out $@,$(MAKECMDGOALS))" && echo $${arg:-${1}}` 23 | 24 | # If command doesn't match, do not throw error 25 | %: 26 | @: 27 | 28 | define helptext 29 | 30 | Commands: 31 | 32 | create-user Create a superuser called "x" with password of "x" 33 | dev Serve manual testing server 34 | docs Serve mkdocs for development. 35 | flush Flush database. 36 | generate Generate test data. 37 | hook Install pre-commit hook. 38 | lint Run pre-commit hooks on all files. 39 | migrate Migrate database. 40 | migrations Make migrations. 41 | profile Run py-spy for a given PID. 42 | setup Make migrations, apply them, and add a superuser 43 | test Run all tests matching the given 44 | tests Run all tests with coverage. 45 | tox Run all tests with tox. 
46 | 47 | Use quotes (" ") if command contains flags (-h / --help) 48 | endef 49 | 50 | export helptext 51 | 52 | help: 53 | @echo "$$helptext" 54 | 55 | create-user: 56 | @DJANGO_SUPERUSER_PASSWORD=x poetry run python manage.py createsuperuser --username x --email user@user.com --no-input 57 | 58 | dev: 59 | @poetry run python manage.py runserver localhost:8000 60 | 61 | docs: 62 | @poetry run mkdocs serve -a localhost:8080 63 | 64 | flush: 65 | @poetry run python manage.py flush --no-input 66 | 67 | generate: 68 | @poetry run python manage.py create_test_data 69 | 70 | hook: 71 | @poetry run pre-commit install 72 | 73 | lint: 74 | @poetry run pre-commit run --all-files 75 | 76 | migrate: 77 | @poetry run python manage.py migrate 78 | 79 | migrations: 80 | @poetry run python manage.py makemigrations 81 | 82 | profile: 83 | @poetry run py-spy record -o profile.svg --pid $(call args, "") 84 | 85 | setup: migrations migrate create-user 86 | 87 | test: 88 | @poetry run pytest -k $(call args, "") 89 | 90 | tests: 91 | @poetry run coverage run -m pytest 92 | @poetry run coverage report -m 93 | 94 | tox: 95 | @poetry run tox 96 | 97 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Graphene Django Query Optimizer 2 | 3 | [![Coverage Status][coverage-badge]][coverage] 4 | [![GitHub Workflow Status][status-badge]][status] 5 | [![PyPI][pypi-badge]][pypi] 6 | [![GitHub][licence-badge]][licence] 7 | [![GitHub Last Commit][repo-badge]][repo] 8 | [![GitHub Issues][issues-badge]][issues] 9 | [![Downloads][downloads-badge]][pypi] 10 | [![Python Version][version-badge]][pypi] 11 | 12 | ```shell 13 | pip install graphene-django-query-optimizer 14 | ``` 15 | 16 | --- 17 | 18 | **Documentation**: [https://mrthearman.github.io/graphene-django-query-optimizer/](https://mrthearman.github.io/graphene-django-query-optimizer/) 19 | 20 | **Source Code**: 
[https://github.com/MrThearMan/graphene-django-query-optimizer/](https://github.com/MrThearMan/graphene-django-query-optimizer/) 21 | 22 | **Contributing**: [https://github.com/MrThearMan/graphene-django-query-optimizer/blob/main/CONTRIBUTING.md](https://github.com/MrThearMan/graphene-django-query-optimizer/blob/main/CONTRIBUTING.md) 23 | 24 | --- 25 | 26 | Solve the GraphQL [N+1 problem] in [graphene-django] applications 27 | just by changing a few imports, automatically adding the appropriate 28 | [`only`](https://docs.djangoproject.com/en/dev/ref/models/querysets/#only), 29 | [`select_related`](https://docs.djangoproject.com/en/dev/ref/models/querysets/#select-related), 30 | and [`prefetch_related`](https://docs.djangoproject.com/en/dev/ref/models/querysets/#prefetch-related) 31 | method calls to your QuerySets to fetch _only_ what you need. 32 | 33 | ```python 34 | import graphene 35 | from example_project.app.models import Example 36 | 37 | from query_optimizer import DjangoObjectType, DjangoListField 38 | 39 | class ExampleType(DjangoObjectType): 40 | class Meta: 41 | model = Example 42 | 43 | class Query(graphene.ObjectType): 44 | all_examples = DjangoListField(ExampleType) 45 | 46 | schema = graphene.Schema(query=Query) 47 | ``` 48 | 49 | [coverage-badge]: https://coveralls.io/repos/github/MrThearMan/graphene-django-query-optimizer/badge.svg?branch=main 50 | [coverage]: https://coveralls.io/github/MrThearMan/graphene-django-query-optimizer?branch=main 51 | [downloads-badge]: https://img.shields.io/pypi/dm/graphene-django-query-optimizer 52 | [graphene-django]: https://github.com/graphql-python/graphene-django 53 | [issues-badge]: https://img.shields.io/github/issues-raw/MrThearMan/graphene-django-query-optimizer 54 | [issues]: https://github.com/MrThearMan/graphene-django-query-optimizer/issues 55 | [licence-badge]: https://img.shields.io/github/license/MrThearMan/graphene-django-query-optimizer 56 | [licence]: 
https://github.com/MrThearMan/graphene-django-query-optimizer/blob/main/LICENSE 57 | [N+1 problem]: https://stackoverflow.com/a/97253 58 | [pypi-badge]: https://img.shields.io/pypi/v/graphene-django-query-optimizer 59 | [pypi]: https://pypi.org/project/graphene-django-query-optimizer 60 | [repo-badge]: https://img.shields.io/github/last-commit/MrThearMan/graphene-django-query-optimizer 61 | [repo]: https://github.com/MrThearMan/graphene-django-query-optimizer/commits/main 62 | [status-badge]: https://img.shields.io/github/actions/workflow/status/MrThearMan/graphene-django-query-optimizer/test.yml?branch=main 63 | [status]: https://github.com/MrThearMan/graphene-django-query-optimizer/actions/workflows/test.yml 64 | [version-badge]: https://img.shields.io/pypi/pyversions/graphene-django-query-optimizer 65 | -------------------------------------------------------------------------------- /docs/css/docs.css: -------------------------------------------------------------------------------- 1 | body { 2 | color: #bbbbbb; 3 | } 4 | 5 | ::-webkit-scrollbar { 6 | width: 8px; 7 | height: 8px; 8 | background: #222222!important; 9 | } 10 | 11 | ::-webkit-scrollbar-track { 12 | border-radius: 5px; 13 | background: #202020!important; 14 | } 15 | 16 | ::-webkit-scrollbar-thumb { 17 | border-radius: 5px; 18 | background: #424242!important; 19 | } 20 | 21 | .wy-nav-content { 22 | max-width: 1024px; 23 | margin: auto; 24 | } 25 | 26 | .wy-side-scroll { 27 | overflow-y: auto; 28 | height: auto; 29 | } 30 | 31 | div.wy-nav-content, 32 | .wy-nav-content-wrap, 33 | .wy-side-nav-search input[type=text] { 34 | background: #222222; 35 | } 36 | 37 | .wy-menu-vertical li.toctree-l2 a, 38 | .wy-menu-vertical a { 39 | color: #bbbbbb; 40 | background: #343131; 41 | } 42 | 43 | .wy-menu-vertical li.toctree-l2 a:hover, 44 | .wy-menu-vertical a:hover { 45 | background: #2d2d2d; 46 | } 47 | 48 | .wy-side-nav-search input[type=text] { 49 | color: #bbbbbb; 50 | box-shadow: none; 51 | caret-color : 
#bbbbbb; 52 | } 53 | 54 | .rst-content div[class^=highlight] pre { 55 | background: #121212; 56 | } 57 | 58 | .wy-menu-vertical li.current, 59 | .wy-menu-vertical li.current > a, 60 | .wy-menu-vertical li.toctree-l1.current > a, 61 | .wy-menu-vertical li.toctree-l2.current > a, 62 | .wy-menu-vertical li.toctree-l2.current li.toctree-l3 > a, 63 | .wy-menu-vertical li.toctree-l3.current > a, 64 | .wy-menu-vertical li.toctree-l3.current li.toctree-l4 > a { 65 | color: #bbbbbb; 66 | background: #343131!important; 67 | border-bottom: 1px solid #343131; 68 | border-top: 1px solid #343131; 69 | } 70 | 71 | .wy-menu-vertical li.current, 72 | .wy-menu-vertical li.current > a:hover, 73 | .wy-menu-vertical li.toctree-l1.current > a:hover, 74 | .wy-menu-vertical li.toctree-l2.current > a:hover, 75 | .wy-menu-vertical li.toctree-l2.current li.toctree-l3 > a:hover, 76 | .wy-menu-vertical li.toctree-l3.current > a:hover, 77 | .wy-menu-vertical li.toctree-l3.current li.toctree-l4 > a:hover { 78 | color: #bbbbbb; 79 | background: #2d2d2d!important; 80 | border-bottom: 1px solid #343131; 81 | border-top: 1px solid #343131; 82 | } 83 | 84 | .rst-content pre code, 85 | .rst-content code, 86 | .rst-content tt, 87 | code { 88 | color: #bbbbbb; 89 | background: #121212; 90 | border: none; 91 | box-shadow: none; 92 | border-radius: 3px; 93 | } 94 | 95 | code > span.hll { 96 | background: #1c1c1c; 97 | } 98 | 99 | /*First highlighted code line that is followed by another highlighted code line*/ 100 | code > span.hll:not(span.hll + a + span.hll):has(+ a + span.hll) { 101 | /*background: blue;*/ 102 | border-top-right-radius: 3px; 103 | border-top-left-radius: 3px; 104 | } 105 | 106 | /*Last highlighted code line that is preceded by another highlighted code line*/ 107 | code > span.hll:not(span.hll:has(+ a + span.hll)) { 108 | /*background: red;*/ 109 | border-bottom-right-radius: 3px; 110 | border-bottom-left-radius: 3px; 111 | } 112 | 113 | /*Highlighted code line that is not followed or 
preceded by another highlighted code line*/ 114 | code > span.hll:not(span.hll + a + span.hll):not(:has(+ a + span.hll)) { 115 | /*background: yellow;*/ 116 | border-radius: 3px; 117 | } 118 | 119 | div.highlight > span.filename { 120 | /*background: red;*/ 121 | padding: 8px; 122 | line-height: 1.8rem; 123 | } 124 | 125 | .hljs-attribute, 126 | .hljs-name, 127 | .hljs-tag, 128 | .hljs-keyword, 129 | .hljs-literal { 130 | color: coral; 131 | } 132 | 133 | .hljs-built_in { 134 | color: #8484d5; 135 | } 136 | 137 | .hljs-string { 138 | color: #667e49; 139 | } 140 | 141 | .hljs-title { 142 | color: #f3c069; 143 | } 144 | 145 | .hljs-class .hljs-title { 146 | color: #cccccc; 147 | } 148 | 149 | .rst-content .section .docutils { 150 | display: inline-table; 151 | } 152 | 153 | .rst-content div[class^=highlight], 154 | .rst-content pre.literal-block { 155 | border: 1px solid #626262; 156 | border-radius: 5px; 157 | } 158 | 159 | .rst-content table.docutils thead, 160 | .rst-content table.field-list thead, 161 | .wy-table thead { 162 | color: #bbbbbb; 163 | background: #121212; 164 | border-color: #121212!important; 165 | } 166 | 167 | html.writer-html5 .rst-content table.docutils th { 168 | color: #bbbbbb; 169 | background: #121212; 170 | border-color: #121212!important; 171 | } 172 | 173 | .rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td, 174 | .wy-table-backed, 175 | .wy-table-odd td, 176 | .wy-table-striped tr:nth-child(2n-1) td { 177 | color: #bbbbbb; 178 | background: #414141; 179 | border-color: #121212!important; 180 | } 181 | 182 | .rst-content table.docutils tbody>tr:last-child td, 183 | .wy-table-bordered-all tbody>tr:last-child td, 184 | .rst-content table.docutils td { 185 | color: #bbbbbb; 186 | background: #343131; 187 | border-color: #121212!important; 188 | } 189 | 190 | .rst-content table.docutils thead, 191 | .rst-content table.field-list thead, 192 | .wy-table thead { 193 | color: #bbbbbb; 194 | background: #343131; 195 | 
border-color: #121212!important; 196 | } 197 | 198 | .rst-versions { 199 | margin-top: 6px; 200 | } 201 | 202 | blockquote { 203 | background: #323232; 204 | border: 2px solid #363636; 205 | border-left: 4px solid #2980b9; 206 | margin-left: 0 !important; 207 | padding: 16px 12px 16px 24px; 208 | } 209 | 210 | blockquote > p { 211 | margin: 0; 212 | } 213 | 214 | .rst-content table.docutils td { 215 | white-space: break-spaces; 216 | } 217 | 218 | blockquote code { 219 | line-height: normal; 220 | } 221 | 222 | li p { 223 | margin: 0 !important; 224 | } 225 | 226 | 227 | /* 228 | Change mermaid via css here since 229 | mkdocs does not know how to read 230 | style definitions from the mermaid 231 | config 232 | */ 233 | 234 | div.mermaid { 235 | display: flex; 236 | justify-content: center; 237 | } 238 | 239 | div.mermaid svg { 240 | width: auto; 241 | height: auto; 242 | max-width: 100%; 243 | padding: 12px 0; 244 | } 245 | 246 | .edgeLabel { 247 | background-color: transparent!important; 248 | color: #bbbbbb!important; 249 | } 250 | 251 | .messageText { 252 | fill: #bbbbbb!important; 253 | stroke: none!important; 254 | } 255 | 256 | g rect { 257 | fill: #DAE8FC !important; 258 | stroke: #6C8EBF !important; 259 | } 260 | 261 | g polygon { 262 | fill: #FFF2CC !important; 263 | stroke: #E5D092 !important; 264 | } 265 | 266 | g[id*="success__"] rect { 267 | fill: #D5E8D4 !important; 268 | stroke: #82B366 !important; 269 | } 270 | 271 | g[id*="fail__"] rect { 272 | fill: #F8CECC !important; 273 | stroke: #B85450 !important; 274 | } 275 | 276 | g[id*="warn__"] rect { 277 | fill: #FFF2CC !important; 278 | stroke: #E5D092 !important; 279 | } 280 | 281 | g[id*="primary__"] rect { 282 | fill: #DAE8FC !important; 283 | stroke: #6C8EBF !important; 284 | } 285 | 286 | g[id*="secondary__"] rect { 287 | fill: #F5F5F5 !important; 288 | stroke: #666666 !important; 289 | } 290 | 291 | g[id*="start__"] rect { 292 | fill: #000000 !important; 293 | stroke: #666666 !important; 294 | } 
295 | 296 | g[id*="start__"] span { 297 | color: #ffffff !important; 298 | } 299 | -------------------------------------------------------------------------------- /docs/custom.md: -------------------------------------------------------------------------------- 1 | # Custom fields 2 | 3 | GraphQL types can have non-model fields using custom resolvers. 4 | 5 | ```python 6 | import graphene 7 | from example_project.app.models import HousingCompany 8 | 9 | from query_optimizer import DjangoObjectType 10 | 11 | class HousingCompanyType(DjangoObjectType): 12 | class Meta: 13 | model = HousingCompany 14 | 15 | greeting = graphene.String() 16 | 17 | def resolve_greeting(root: HousingCompany, info) -> str: 18 | return f"Hello World!" 19 | ``` 20 | 21 | If the custom type requires fields from its related models to resolve, 22 | you have a few options. 23 | 24 | ## AnnotatedField 25 | 26 | This field can be used to add annotations to the queryset when the field is requested. 27 | 28 | ```python 29 | import graphene 30 | from django.db.models import F, Value 31 | from example_project.app.models import HousingCompany 32 | 33 | from query_optimizer import DjangoObjectType, AnnotatedField # new import 34 | 35 | class HousingCompanyType(DjangoObjectType): 36 | class Meta: 37 | model = HousingCompany 38 | 39 | greeting = AnnotatedField(graphene.String, expression=Value("Hello ") + F("name")) 40 | ``` 41 | 42 | Note that only a single annotation can be added, however, you can use the `aliases` 43 | parameter to help with more complex annotations. 
44 | 45 | ```python 46 | import graphene 47 | from django.db.models import F, Value 48 | from example_project.app.models import HousingCompany 49 | 50 | from query_optimizer import DjangoObjectType, AnnotatedField 51 | 52 | class HousingCompanyType(DjangoObjectType): 53 | class Meta: 54 | model = HousingCompany 55 | 56 | greeting = AnnotatedField( 57 | graphene.String, 58 | expression=F("hello") + F("name"), 59 | aliases={"hello": Value("Hello ")}, # very complex! 60 | ) 61 | ``` 62 | 63 | ## MultiField 64 | 65 | This field can be used to add multiple fields to the queryset when the field is requested. 66 | 67 | ```python 68 | import graphene 69 | from example_project.app.models import HousingCompany 70 | 71 | from query_optimizer import DjangoObjectType, MultiField # new import 72 | 73 | class HousingCompanyType(DjangoObjectType): 74 | class Meta: 75 | model = HousingCompany 76 | 77 | greeting = MultiField(graphene.String, fields=["pk", "name"]) 78 | 79 | def resolve_greeting(root: HousingCompany, info) -> str: 80 | return f"Hello {root.name} ({root.pk})!" 81 | ``` 82 | 83 | Note that this can only be used for fields on the same model. 84 | 85 | ## ManuallyOptimizedField 86 | 87 | This is the most powerful custom field type, which allows defining a custom method to 88 | manually optimize the queryset when the field is requested. This allows for optimization 89 | strategies that are not possible with the other fields. 90 | 91 | > **Note:** You shouldn't default to using this field, as it can break the optimization 92 | > if you are not careful in considering other optimization already in the optimizer. 
93 | 94 | ```python 95 | import graphene 96 | from django.db.models import QuerySet 97 | from example_project.app.models import HousingCompany 98 | 99 | from query_optimizer import DjangoObjectType, ManuallyOptimizedField # new import 100 | from query_optimizer.optimizer import QueryOptimizer # for type hinting 101 | 102 | class HousingCompanyType(DjangoObjectType): 103 | class Meta: 104 | model = HousingCompany 105 | 106 | extra = ManuallyOptimizedField(graphene.String) 107 | 108 | @staticmethod 109 | def optimize_extra(queryset: QuerySet, optimizer: QueryOptimizer, **kwargs) -> QuerySet: 110 | # Do any optimizations here, returning the queryset. 111 | return queryset 112 | 113 | def resolve_extra(root: HousingCompany, info) -> str: 114 | # Still needs a resolver. 115 | return ... 116 | ``` 117 | 118 | Note that this can only be used for fields on the same model. 119 | 120 | ## The `field_name` argument 121 | 122 | `RelatedField`, `DjangoListField`, `DjangoConnectionField` have a `field_name` 123 | argument that can be used to specify the field name in the queryset if it's 124 | different from the field name in the model. 125 | 126 | ```python 127 | from example_project.app.models import HousingCompany 128 | 129 | from query_optimizer import DjangoObjectType, DjangoListField # new import 130 | 131 | class HousingCompanyType(DjangoObjectType): 132 | class Meta: 133 | model = HousingCompany 134 | 135 | developers_alt = DjangoListField("...", field_name="developers") 136 | ``` 137 | 138 | This marks the field as being for the same relation as the `field_name` is on the model, 139 | and it will resolve the field as if it was that relation. This is achieved by using the 140 | `Prefetch("developers", qs, to_attr="developers_alt")` feature from Django. 
141 | -------------------------------------------------------------------------------- /docs/depth.md: -------------------------------------------------------------------------------- 1 | # Depth Limiting 2 | 3 | The `optimize()` function has builtin query depth limiting, which 4 | will allow a maximum of 10 `select_related` and `prefetch_related` actions 5 | per query by default. This should be a sensible limit that protects 6 | your API from misuse, but if you need to change it, it can be done on 7 | per resolver basis: 8 | 9 | ```python 10 | import graphene 11 | from example_project.app.models import Apartment 12 | 13 | from query_optimizer import DjangoObjectType, optimize 14 | 15 | class ApartmentType(DjangoObjectType): 16 | class Meta: 17 | model = Apartment 18 | 19 | class Query(graphene.ObjectType): 20 | all_apartments = graphene.List(ApartmentType) 21 | 22 | def resolve_all_apartments(root, info): 23 | return optimize(Apartment.objects.all(), info, max_complexity=4) # changed 24 | 25 | schema = graphene.Schema(query=Query) 26 | ``` 27 | 28 | ...or per ObjectType basis for relay nodes and connections. 
29 | 30 | ```python 31 | import graphene 32 | from graphene import relay 33 | from example_project.app.models import Apartment 34 | 35 | from query_optimizer import DjangoObjectType 36 | 37 | class ApartmentNode(DjangoObjectType): 38 | class Meta: 39 | model = Apartment 40 | interfaces = (relay.Node,) 41 | max_complexity = 4 # changed 42 | 43 | class Query(graphene.ObjectType): 44 | apartment = relay.Node.Field(ApartmentNode) 45 | 46 | schema = graphene.Schema(query=Query) 47 | ``` 48 | 49 | You can also set the `MAX_COMPLEXITY` setting in your project's settings.py 50 | to set the value for all optimizers: 51 | 52 | ```python 53 | GRAPHQL_QUERY_OPTIMIZER = { 54 | "MAX_COMPLEXITY": 15, 55 | } 56 | ``` 57 | -------------------------------------------------------------------------------- /docs/filtering.md: -------------------------------------------------------------------------------- 1 | # Filtering 2 | 3 | For adding additional filtering, optional dependency [django-filter][filters] 4 | is required. 5 | 6 | ```python 7 | import graphene 8 | from graphene import relay 9 | from example_project.app.models import Apartment 10 | 11 | from query_optimizer import DjangoObjectType, DjangoConnectionField 12 | 13 | class ApartmentNode(DjangoObjectType): 14 | class Meta: 15 | model = Apartment 16 | filter_fields = { 17 | "street_address": ["exact"], 18 | "building__name": ["exact"], 19 | } 20 | interfaces = (relay.Node,) 21 | 22 | 23 | class Query(graphene.ObjectType): 24 | paged_apartments = DjangoConnectionField(ApartmentNode) 25 | 26 | 27 | schema = graphene.Schema(query=Query) 28 | ``` 29 | 30 | We can also implement a custom Filterset class to have more control over the filtering. 
31 | 32 | ```python 33 | from example_project.app.models import Apartment 34 | 35 | from django_filters import FilterSet 36 | 37 | class ApartmentFilterSet(FilterSet): 38 | # Custom filters can be added here 39 | 40 | class Meta: 41 | model = Apartment 42 | fields = [ 43 | "completion_date", 44 | "street_address", 45 | "stair", 46 | "floor", 47 | "apartment_number", 48 | ] 49 | ``` 50 | 51 | These filters are for client side filtering. If you want to do server side filtering, 52 | for example to automatically remove rows the user doesn't have access to, we can use 53 | the `filter_queryset` method of the `DjangoObjectType` class. 54 | 55 | ```python 56 | from django.db.models import QuerySet 57 | from query_optimizer import DjangoObjectType 58 | from query_optimizer.typing import GQLInfo 59 | 60 | class ApartmentType(DjangoObjectType): 61 | @classmethod 62 | def filter_queryset(cls, queryset: QuerySet, info: GQLInfo) -> QuerySet: 63 | # Add your custom filtering here 64 | return queryset.filter(...) 65 | ``` 66 | 67 | The optimizer will find this method and use it automatically when this 68 | object type is used in a query. No additional queries are performed when using 69 | this method as opposed to overriding the `get_queryset` method itself. 70 | 71 | [filters]: https://github.com/carltongibson/django-filter 72 | -------------------------------------------------------------------------------- /docs/fragments.md: -------------------------------------------------------------------------------- 1 | # Fragments 2 | 3 | ## Fragment spreads 4 | 5 | Example query: 6 | 7 | ```graphql 8 | query { 9 | allApartments { 10 | ...Shares 11 | } 12 | } 13 | 14 | fragment Shares on ApartmentType { 15 | sharesStart 16 | sharesEnd 17 | } 18 | ``` 19 | 20 | Fragments spreads like these are optimized without any additional setup. 21 | 22 | ## Inline fragments 23 | 24 | Example query: 25 | 26 | ```graphql 27 | query { 28 | allPeople { 29 | ... 
on DeveloperType { 30 | name 31 | housingCompanies { 32 | name 33 | } 34 | __typename 35 | } 36 | ... on PropertyManagerType { 37 | name 38 | housingCompanies { 39 | name 40 | } 41 | __typename 42 | } 43 | ... on OwnerType { 44 | name 45 | ownerships { 46 | percentage 47 | } 48 | __typename 49 | } 50 | } 51 | } 52 | ``` 53 | 54 | Inline fragments like these can also be optimized. 55 | Here is how you would construct a resolver like this: 56 | 57 | ```python 58 | import itertools 59 | import graphene 60 | from example_project.app.models import Developer, PropertyManager, Owner 61 | 62 | from query_optimizer import DjangoObjectType, optimize 63 | 64 | class DeveloperType(DjangoObjectType): 65 | class Meta: 66 | model = Developer 67 | 68 | class PropertyManagerType(DjangoObjectType): 69 | class Meta: 70 | model = PropertyManager 71 | 72 | class OwnerType(DjangoObjectType): 73 | class Meta: 74 | model = Owner 75 | 76 | class People(graphene.Union): 77 | class Meta: 78 | types = ( 79 | DeveloperType, 80 | PropertyManagerType, 81 | OwnerType, 82 | ) 83 | 84 | class Query(graphene.ObjectType): 85 | 86 | all_people = graphene.List(People) 87 | 88 | def resolve_all_people(root, info): 89 | developers = optimize(Developer.objects.all(), info) 90 | property_managers = optimize(PropertyManager.objects.all(), info) 91 | owners = optimize(Owner.objects.all(), info) 92 | return itertools.chain(developers, property_managers, owners) 93 | 94 | schema = graphene.Schema(query=Query) 95 | ``` 96 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # Graphene Django Query Optimizer 2 | 3 | [![Coverage Status][coverage-badge]][coverage] 4 | [![GitHub Workflow Status][status-badge]][status] 5 | [![PyPI][pypi-badge]][pypi] 6 | [![GitHub][licence-badge]][licence] 7 | [![GitHub Last Commit][repo-badge]][repo] 8 | [![GitHub Issues][issues-badge]][issues] 9 | 
[![Downloads][downloads-badge]][pypi] 10 | [![Python Version][version-badge]][pypi] 11 | 12 | ```shell 13 | pip install graphene-django-query-optimizer 14 | ``` 15 | 16 | --- 17 | 18 | **Documentation**: [https://mrthearman.github.io/graphene-django-query-optimizer/](https://mrthearman.github.io/graphene-django-query-optimizer/) 19 | 20 | **Source Code**: [https://github.com/MrThearMan/graphene-django-query-optimizer/](https://github.com/MrThearMan/graphene-django-query-optimizer/) 21 | 22 | **Contributing**: [https://github.com/MrThearMan/graphene-django-query-optimizer/blob/main/CONTRIBUTING.md](https://github.com/MrThearMan/graphene-django-query-optimizer/blob/main/CONTRIBUTING.md) 23 | 24 | --- 25 | 26 | Solve the GraphQL [N+1 problem] in [graphene-django] applications 27 | just by changing a few imports, automatically adding the appropriate 28 | [`only`](https://docs.djangoproject.com/en/dev/ref/models/querysets/#only), 29 | [`select_related`](https://docs.djangoproject.com/en/dev/ref/models/querysets/#select-related), 30 | and [`prefetch_related`](https://docs.djangoproject.com/en/dev/ref/models/querysets/#prefetch-related) 31 | method calls to your QuerySets to fetch _only_ what you need. 
32 | 33 | ```python 34 | import graphene 35 | from example_project.app.models import Example 36 | 37 | from query_optimizer import DjangoObjectType, DjangoListField 38 | 39 | class ExampleType(DjangoObjectType): 40 | class Meta: 41 | model = Example 42 | 43 | class Query(graphene.ObjectType): 44 | all_examples = DjangoListField(ExampleType) 45 | 46 | schema = graphene.Schema(query=Query) 47 | ``` 48 | 49 | [coverage-badge]: https://coveralls.io/repos/github/MrThearMan/graphene-django-query-optimizer/badge.svg?branch=main 50 | [coverage]: https://coveralls.io/github/MrThearMan/graphene-django-query-optimizer?branch=main 51 | [downloads-badge]: https://img.shields.io/pypi/dm/graphene-django-query-optimizer 52 | [graphene-django]: https://github.com/graphql-python/graphene-django 53 | [issues-badge]: https://img.shields.io/github/issues-raw/MrThearMan/graphene-django-query-optimizer 54 | [issues]: https://github.com/MrThearMan/graphene-django-query-optimizer/issues 55 | [licence-badge]: https://img.shields.io/github/license/MrThearMan/graphene-django-query-optimizer 56 | [licence]: https://github.com/MrThearMan/graphene-django-query-optimizer/blob/main/LICENSE 57 | [N+1 problem]: https://stackoverflow.com/a/97253 58 | [pypi-badge]: https://img.shields.io/pypi/v/graphene-django-query-optimizer 59 | [pypi]: https://pypi.org/project/graphene-django-query-optimizer 60 | [repo-badge]: https://img.shields.io/github/last-commit/MrThearMan/graphene-django-query-optimizer 61 | [repo]: https://github.com/MrThearMan/graphene-django-query-optimizer/commits/main 62 | [status-badge]: https://img.shields.io/github/actions/workflow/status/MrThearMan/graphene-django-query-optimizer/test.yml?branch=main 63 | [status]: https://github.com/MrThearMan/graphene-django-query-optimizer/actions/workflows/test.yml 64 | [version-badge]: https://img.shields.io/pypi/pyversions/graphene-django-query-optimizer 65 | -------------------------------------------------------------------------------- 
/docs/quickstart.md: -------------------------------------------------------------------------------- 1 | # Quickstart 2 | 3 | The database schema these examples will be using can be seen [here][schema]. 4 | 5 | Let's say we have defined a graphql schema like this: 6 | 7 | ```python 8 | import graphene 9 | from graphene_django import DjangoObjectType, DjangoListField 10 | from example_project.app.models import Apartment 11 | 12 | class ApartmentType(DjangoObjectType): 13 | class Meta: 14 | model = Apartment 15 | 16 | class Query(graphene.ObjectType): 17 | # Imagine the rest of the types are also here, 18 | # and we omit it for brevity. 19 | all_apartments = DjangoListField(ApartmentType) 20 | 21 | schema = graphene.Schema(query=Query) 22 | ``` 23 | 24 | Now, based on our database schema, we want to make a query like this: 25 | 26 | ```graphql 27 | query { 28 | allApartments { 29 | streetAddress 30 | stair 31 | apartmentNumber 32 | sales { 33 | purchaseDate 34 | ownerships { 35 | percentage 36 | owner { 37 | name 38 | } 39 | } 40 | } 41 | } 42 | } 43 | ``` 44 | 45 | As is, this query will result in: 46 | 47 | - 1 query for all apartments 48 | - 1 query for _**each**_ sale 49 | - 1 query for _**each**_ ownership in _**each**_ sale 50 | - 1 query for _**each**_ owner in _**each**_ ownership in _**each**_ sale 51 | 52 | Let's say that we have: 53 | 54 | - a modest 20 apartments 55 | - each apartment has 3 sales 56 | - each sale has 2 ownerships 57 | 58 | In total, that's... 59 | 60 | ``` 61 | 1 + (20 * 3) + (20 * 3 * 2) + (20 * 3 * 2 * 1) = 301 queries 62 | ``` 63 | 64 | It's important to notice that the amount of queries is proportional to the 65 | amount of records in our database, so the number of queries is only going to increase. 66 | This is called an [N+1 problem]. 67 | 68 | We are also over-fetching all fields on each model, and thus not taking advantage of 69 | GraphQL's schema at all. 70 | 71 | This is the issue this library hopes to solve.
72 | 73 | > Shoutout to [graphene-django-optimizer][prev], which inspired this library. 74 | > The library seem to no longer work in modern versions of `graphene-django`. 75 | > Hopefully this library can replace it, while offering a cleaner API. 76 | 77 | We can optimize this query by simply using `DjangoObjectType` from `query_optimizer` 78 | instead of `graphene_django` 79 | 80 | ```python 81 | import graphene 82 | from example_project.app.models import Apartment 83 | 84 | from query_optimizer import DjangoListField, DjangoObjectType 85 | 86 | class ApartmentType(DjangoObjectType): 87 | class Meta: 88 | model = Apartment 89 | 90 | class Query(graphene.ObjectType): 91 | all_apartments = DjangoListField(ApartmentType) 92 | 93 | schema = graphene.Schema(query=Query) 94 | ``` 95 | 96 | We could also use the `optimize` function to wrap a custom resolver queryset: 97 | 98 | ```python 99 | import graphene 100 | from query_optimizer import DjangoObjectType, optimize # new import 101 | from example_project.app.models import Apartment 102 | 103 | class ApartmentType(DjangoObjectType): 104 | class Meta: 105 | model = Apartment 106 | 107 | class Query(graphene.ObjectType): 108 | all_apartments = graphene.List(ApartmentType) 109 | 110 | def resolve_all_apartments(root, info): 111 | return optimize(Apartment.objects.all(), info) # wrapped function 112 | 113 | schema = graphene.Schema(query=Query) 114 | ``` 115 | 116 | That's it! 117 | 118 | With the following configuration, the same query will result in 119 | just _**3**_ database queries, regardless of the number of database records. 120 | 121 | - 1 query for all apartments 122 | - 1 query for all sales in all apartments 123 | - 1 query for all ownerships with their owners for each sale in each apartment 124 | 125 | Also, the optimization will only fetch the fields given in the GraphQL query, 126 | as the query intended. 127 | 128 | See [technical details] on how this works. 
129 | 130 | 131 | [schema]: https://github.com/MrThearMan/graphene-django-query-optimizer/blob/main/tests/example/models.py 132 | [N+1 problem]: https://stackoverflow.com/a/97253 133 | [prev]: https://github.com/tfoxy/graphene-django-optimizer 134 | [only]: https://docs.djangoproject.com/en/dev/ref/models/querysets/#only 135 | [technical details]: https://mrthearman.github.io/graphene-django-query-optimizer/technical/ 136 | -------------------------------------------------------------------------------- /docs/relay.md: -------------------------------------------------------------------------------- 1 | # Relay 2 | 3 | The optimization will also work with [Relay] Nodes. 4 | 5 | ## Nodes 6 | 7 | Let's say we have the following node in our schema: 8 | 9 | ```python 10 | import graphene 11 | from graphene import relay 12 | from graphene_django import DjangoObjectType 13 | from example_project.app.models import Apartment 14 | 15 | class ApartmentNode(DjangoObjectType): 16 | class Meta: 17 | model = Apartment 18 | filter_fields = { 19 | "street_address": ["exact"], 20 | "building__name": ["exact"], 21 | } 22 | interfaces = (relay.Node,) 23 | 24 | class Query(graphene.ObjectType): 25 | apartment = relay.Node.Field(ApartmentNode) 26 | 27 | schema = graphene.Schema(query=Query) 28 | ``` 29 | 30 | We can optimize this query by simply using `DjangoObjectType` from `query_optimizer`. 31 | 32 | ```python 33 | import graphene 34 | from graphene import relay 35 | from example_project.app.models import Apartment 36 | 37 | from query_optimizer import DjangoObjectType 38 | 39 | class ApartmentNode(DjangoObjectType): 40 | class Meta: 41 | model = Apartment 42 | filter_fields = { 43 | "street_address": ["exact"], 44 | "building__name": ["exact"], 45 | } 46 | interfaces = (relay.Node,) 47 | 48 | class Query(graphene.ObjectType): 49 | apartment = relay.Node.Field(ApartmentNode) 50 | 51 | schema = graphene.Schema(query=Query) 52 | ``` 53 | 54 | That's it!
55 | 56 | ## Connections 57 | 58 | Given the following connection in our schema: 59 | 60 | ```python 61 | import graphene 62 | from graphene import relay 63 | from example_project.app.models import Apartment 64 | 65 | from graphene_django import DjangoObjectType, DjangoConnectionField 66 | 67 | class ApartmentNode(DjangoObjectType): 68 | class Meta: 69 | model = Apartment 70 | interfaces = (relay.Node,) 71 | 72 | class Query(graphene.ObjectType): 73 | paged_apartments = DjangoConnectionField(ApartmentNode) 74 | 75 | schema = graphene.Schema(query=Query) 76 | ``` 77 | 78 | We can optimize this query by simply using `DjangoObjectType` 79 | and `DjangoConnectionField` from `query_optimizer`, like this: 80 | 81 | ```python 82 | import graphene 83 | from graphene import relay 84 | from example_project.app.models import Apartment 85 | 86 | from query_optimizer import DjangoObjectType, DjangoConnectionField 87 | 88 | class ApartmentNode(DjangoObjectType): 89 | class Meta: 90 | model = Apartment 91 | interfaces = (relay.Node,) 92 | 93 | class Query(graphene.ObjectType): 94 | paged_apartments = DjangoConnectionField(ApartmentNode) 95 | 96 | schema = graphene.Schema(query=Query) 97 | ``` 98 | 99 | That's it! 100 | 101 | 102 | [Relay]: https://relay.dev/docs/guides/graphql-server-specification/ 103 | -------------------------------------------------------------------------------- /docs/settings.md: -------------------------------------------------------------------------------- 1 | # Settings 2 | 3 | Here are the available settings. 
4 | 5 | | Setting | Type | Default | Description | 6 | |----------------------------------------------------|------|------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| 7 | | `ALLOW_CONNECTION_AS_DEFAULT_NESTED_TO_MANY_FIELD` | bool | False | Should `DjangoConnectionField` be allowed to be generated for nested to-many fields if the `ObjectType` has a connection? If `False` (default), always use `DjangoListField`s. Doesn't prevent defining a `DjangoConnectionField` on the `ObjectType` manually. | 8 | | `DEFAULT_FILTERSET_CLASS` | str | "" | The default filterset class to use. | 9 | | `DISABLE_ONLY_FIELDS_OPTIMIZATION` | str | False | Set to `True` to disable optimizing fetched fields with `queryset.only()`. | 10 | | `MAX_COMPLEXITY` | int | 10 | Default max number of `select_related` and `prefetch_related` joins optimizer is allowed to optimize. | 11 | | `OPTIMIZER_MARK` | str | "_optimized" | Key used mark if a queryset has been optimized by the query optimizer. | 12 | | `PREFETCH_COUNT_KEY` | str | "_optimizer_count" | Name used for annotating the prefetched queryset total count. | 13 | | `PREFETCH_PARTITION_INDEX` | str | "_optimizer_partition_index" | Name used for aliasing the prefetched queryset partition index. | 14 | | `PREFETCH_SLICE_START` | str | "_optimizer_slice_start" | Name used for aliasing the prefetched queryset slice start. | 15 | | `PREFETCH_SLICE_STOP` | str | "_optimizer_slice_stop" | Name used for aliasing the prefetched queryset slice end. | 16 | | `SKIP_OPTIMIZATION_ON_ERROR` | bool | False | If there is an unexpected error, should the optimizer skip optimization (True) or throw an error (False)? 
| 17 | `TOTAL_COUNT_FIELD` | str | "totalCount" | The field name to use for fetching total count in connection fields. | 18 | 19 | Set them under the `GRAPHQL_QUERY_OPTIMIZER` key in your project's `settings.py` like this: 20 | 21 | ```python 22 | GRAPHQL_QUERY_OPTIMIZER = { 23 | "MAX_COMPLEXITY": 10, 24 | } 25 | ``` 26 | -------------------------------------------------------------------------------- /docs/technical.md: -------------------------------------------------------------------------------- 1 | # Technical details 2 | 3 | The optimizer uses the `GraphQLResolveInfo` and GraphQL AST to introspect 4 | the desired query, and construct [queryset.only()][only], 5 | [queryset.select_related()][select], and [queryset.prefetch_related()][prefetch] 6 | statements for the resolver queryset. The queryset is then "marked as optimized" 7 | in the [queryset hints] by setting a key defined 8 | by the `OPTIMIZER_MARK` setting. 9 | 10 | > Queryset hints are designed to be used in multi-database routing, so this 11 | > is a slightly hacky way of ensuring the mark is retained when the 12 | > queryset is cloned. It is relatively safe since multi-database routers 13 | > should accept the hints as **kwargs, and can ignore this extra hint.
14 | 15 | 16 | [only]: https://docs.djangoproject.com/en/dev/ref/models/querysets/#only 17 | [select]: https://docs.djangoproject.com/en/dev/ref/models/querysets/#select-related 18 | [prefetch]: https://docs.djangoproject.com/en/dev/ref/models/querysets/#prefetch-related 19 | [extensions]: https://github.com/graphql-python/graphql-core/blob/0c93b8452eed38d4f800c7e71cf6f3f3758cd1c6/src/graphql/type/schema.py#L123 20 | [WeakKeyDictionary]: https://docs.python.org/3/library/weakref.html#weakref.WeakKeyDictionary 21 | [Inline fragments]: https://graphql.org/learn/queries/#inline-fragments 22 | [queryset hints]: https://docs.djangoproject.com/en/4.2/topics/db/multi-db/#hints 23 | -------------------------------------------------------------------------------- /example_project/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MrThearMan/graphene-django-query-optimizer/f8291cd21c215f7a84ad79836c6e40a651d15ee2/example_project/__init__.py -------------------------------------------------------------------------------- /example_project/app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MrThearMan/graphene-django-query-optimizer/f8291cd21c215f7a84ad79836c6e40a651d15ee2/example_project/app/__init__.py -------------------------------------------------------------------------------- /example_project/app/admin.py: -------------------------------------------------------------------------------- 1 | from django import forms 2 | from django.contrib import admin 3 | from django.contrib.admin import TabularInline 4 | 5 | from example_project.app.models import ( 6 | Apartment, 7 | Building, 8 | Developer, 9 | HousingCompany, 10 | Owner, 11 | Ownership, 12 | PostalCode, 13 | PropertyManager, 14 | RealEstate, 15 | Sale, 16 | ) 17 | 18 | 19 | class PermMixin: 20 | def has_add_permission(self, *args, **kwargs) -> bool: 21 | return 
False 22 | 23 | def has_change_permission(self, *args, **kwargs) -> bool: 24 | return False 25 | 26 | def has_delete_permission(self, *args, **kwargs) -> bool: 27 | return False 28 | 29 | 30 | class SalesAdminForm(forms.ModelForm): 31 | apartment = forms.ModelChoiceField(queryset=Apartment.objects.all()) 32 | purchase_date = forms.DateField() 33 | purchase_price = forms.DateField() 34 | 35 | class Meta: 36 | model = Sale 37 | fields = "__all__" 38 | exclude = ("apartment", "purchase_date", "purchase_price") 39 | 40 | 41 | class OwnershipAdminForm(forms.ModelForm): 42 | owner = forms.ModelChoiceField(queryset=Owner.objects.all()) 43 | sale = forms.ModelChoiceField(queryset=Sale.objects.all()) 44 | percentage = forms.DecimalField(max_digits=3, decimal_places=0) 45 | 46 | class Meta: 47 | model = Ownership 48 | fields = "__all__" 49 | exclude = ("owner", "sale", "percentage") 50 | 51 | 52 | class HousingCompanyInline(PermMixin, TabularInline): 53 | model = HousingCompany 54 | show_change_link = True 55 | extra = 0 56 | 57 | 58 | class RealEstateInline(PermMixin, TabularInline): 59 | model = RealEstate 60 | show_change_link = True 61 | extra = 0 62 | 63 | 64 | class BuildingInline(PermMixin, TabularInline): 65 | model = Building 66 | show_change_link = True 67 | extra = 0 68 | 69 | 70 | class ApartmentInline(PermMixin, TabularInline): 71 | model = Apartment 72 | show_change_link = True 73 | extra = 0 74 | 75 | 76 | class SaleInline(PermMixin, TabularInline): 77 | model = Sale 78 | form = SalesAdminForm 79 | show_change_link = True 80 | extra = 0 81 | 82 | 83 | class OwnershipInline(PermMixin, TabularInline): 84 | model = Ownership 85 | form = OwnershipAdminForm 86 | show_change_link = True 87 | extra = 0 88 | 89 | 90 | @admin.register(PostalCode) 91 | class PostalCodeAdmin(PermMixin, admin.ModelAdmin): 92 | inlines = [HousingCompanyInline] 93 | 94 | 95 | @admin.register(Developer) 96 | class DeveloperAdmin(PermMixin, admin.ModelAdmin): 97 | pass 98 | 99 | 100 | 
@admin.register(PropertyManager) 101 | class PropertyManagerAdmin(PermMixin, admin.ModelAdmin): 102 | inlines = [HousingCompanyInline] 103 | 104 | 105 | @admin.register(HousingCompany) 106 | class HousingCompanyAdmin(PermMixin, admin.ModelAdmin): 107 | inlines = [RealEstateInline] 108 | 109 | 110 | @admin.register(RealEstate) 111 | class RealEstateAdmin(PermMixin, admin.ModelAdmin): 112 | inlines = [BuildingInline] 113 | 114 | 115 | @admin.register(Building) 116 | class BuildingAdmin(PermMixin, admin.ModelAdmin): 117 | inlines = [ApartmentInline] 118 | 119 | 120 | @admin.register(Apartment) 121 | class ApartmentAdmin(PermMixin, admin.ModelAdmin): 122 | inlines = [SaleInline] 123 | 124 | 125 | @admin.register(Sale) 126 | class SaleAdmin(PermMixin, admin.ModelAdmin): 127 | inlines = [OwnershipInline] 128 | form = SalesAdminForm 129 | readonly_fields = ("apartment", "purchase_date", "purchase_price") 130 | 131 | 132 | @admin.register(Owner) 133 | class OwnerAdmin(PermMixin, admin.ModelAdmin): 134 | inlines = [OwnershipInline] 135 | 136 | 137 | @admin.register(Ownership) 138 | class OwnershipAdmin(PermMixin, admin.ModelAdmin): 139 | form = OwnershipAdminForm 140 | readonly_fields = ("owner", "sale", "percentage") 141 | -------------------------------------------------------------------------------- /example_project/app/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | __all__ = [ 4 | "AppConfig", 5 | ] 6 | 7 | 8 | class ExampeAppConfig(AppConfig): 9 | name = "example_project.app" 10 | -------------------------------------------------------------------------------- /example_project/app/management/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MrThearMan/graphene-django-query-optimizer/f8291cd21c215f7a84ad79836c6e40a651d15ee2/example_project/app/management/__init__.py 
-------------------------------------------------------------------------------- /example_project/app/management/commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MrThearMan/graphene-django-query-optimizer/f8291cd21c215f7a84ad79836c6e40a651d15ee2/example_project/app/management/commands/__init__.py -------------------------------------------------------------------------------- /example_project/app/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MrThearMan/graphene-django-query-optimizer/f8291cd21c215f7a84ad79836c6e40a651d15ee2/example_project/app/migrations/__init__.py -------------------------------------------------------------------------------- /example_project/app/schema.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import itertools 4 | from typing import TYPE_CHECKING, Optional 5 | 6 | import graphene 7 | from django.db import models 8 | from django.db.models.functions import Concat 9 | from graphene import relay 10 | from graphene_django.debug import DjangoDebug 11 | 12 | from query_optimizer import optimize 13 | from query_optimizer.fields import DjangoConnectionField, DjangoListField 14 | from query_optimizer.selections import get_field_selections 15 | 16 | from .models import Apartment, Developer, Example, HousingCompany, Owner, PropertyManager 17 | from .types import ( 18 | ApartmentNode, 19 | ApartmentType, 20 | BuildingNode, 21 | BuildingType, 22 | ContentTypeType, 23 | DeveloperNode, 24 | DeveloperType, 25 | ExampleType, 26 | HousingCompanyNode, 27 | HousingCompanyType, 28 | OwnershipType, 29 | OwnerType, 30 | People, 31 | PlainObjectType, 32 | PostalCodeType, 33 | PropertyManagerNode, 34 | PropertyManagerType, 35 | RealEstateNode, 36 | RealEstateType, 37 | SaleType, 38 | TagType, 39 | ) 40 | 
41 | if TYPE_CHECKING: 42 | from query_optimizer.typing import GQLInfo, Iterable, Union 43 | 44 | 45 | class Query(graphene.ObjectType): 46 | node = relay.Node.Field() 47 | 48 | all_postal_codes = DjangoListField(PostalCodeType) 49 | all_developers = DjangoListField(DeveloperType) 50 | all_property_managers = DjangoListField(PropertyManagerType) 51 | all_housing_companies = DjangoListField(HousingCompanyType) 52 | all_real_estates = DjangoListField(RealEstateType) 53 | all_buildings = DjangoListField(BuildingType) 54 | all_apartments = DjangoListField(ApartmentType) 55 | all_sales = DjangoListField(SaleType) 56 | all_owners = DjangoListField(OwnerType) 57 | all_ownerships = DjangoListField(OwnershipType) 58 | 59 | def resolve_all_apartments(root: None, info: GQLInfo, **kwargs) -> models.QuerySet[Apartment]: 60 | return optimize( 61 | Apartment.objects.all().annotate( 62 | full_address=Concat( 63 | models.F("street_address"), 64 | models.F("floor"), 65 | models.F("apartment_number"), 66 | output_field=models.CharField(), 67 | ), 68 | ), 69 | info, 70 | ) 71 | 72 | housing_company_by_name = graphene.List(HousingCompanyType, name=graphene.String(required=True)) 73 | 74 | def resolve_housing_company_by_name(root: None, info: GQLInfo, name: str) -> models.QuerySet[HousingCompany]: 75 | get_field_selections(info) 76 | return optimize(HousingCompany.objects.filter(name=name), info) 77 | 78 | developer = relay.Node.Field(DeveloperNode) 79 | paged_developers = DjangoConnectionField(DeveloperNode) 80 | apartment = relay.Node.Field(ApartmentNode) 81 | paged_apartments = DjangoConnectionField(ApartmentNode) 82 | building = relay.Node.Field(BuildingNode) 83 | paged_buildings = DjangoConnectionField(BuildingNode) 84 | real_estate = relay.Node.Field(RealEstateNode) 85 | paged_real_estates = DjangoConnectionField(RealEstateNode) 86 | housing_company = relay.Node.Field(HousingCompanyNode) 87 | paged_housing_companies = DjangoConnectionField(HousingCompanyNode) 88 | 
property_managers = relay.Node.Field(PropertyManagerNode) 89 | paged_property_managers = DjangoConnectionField(PropertyManagerNode) 90 | 91 | all_people = graphene.List(People) 92 | 93 | def resolve_all_people(root: None, info: GQLInfo) -> Iterable[Union[Developer, PropertyManager, Owner]]: 94 | developers = optimize(Developer.objects.all(), info) 95 | property_managers = optimize(PropertyManager.objects.all(), info) 96 | owners = optimize(Owner.objects.all(), info) 97 | return itertools.chain(developers, property_managers, owners) 98 | 99 | all_tags = DjangoListField(TagType) 100 | all_content_types = DjangoListField(ContentTypeType) 101 | 102 | # -------------------------------------------------------------------- 103 | 104 | example = graphene.Field(ExampleType, pk=graphene.Int(required=True)) 105 | examples = DjangoListField(ExampleType) 106 | 107 | def resolve_example(root: None, info: GQLInfo, pk: Optional[int] = None): 108 | return optimize(Example.objects.filter(pk=pk), info).first() 109 | 110 | # -------------------------------------------------------------------- 111 | 112 | debug = graphene.Field(DjangoDebug, name="_debug") 113 | 114 | # -------------------------------------------------------------------- 115 | 116 | plain = graphene.Field(PlainObjectType) 117 | 118 | def resolve_plain(root, info: GQLInfo) -> dict[str, str]: 119 | get_field_selections(info) 120 | return { 121 | "foo": "1", 122 | "bar": { 123 | "x": 1, 124 | }, 125 | } 126 | 127 | 128 | schema = graphene.Schema(query=Query) 129 | -------------------------------------------------------------------------------- /example_project/app/utils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import traceback 4 | from contextlib import contextmanager 5 | from dataclasses import dataclass 6 | from functools import partial 7 | from pathlib import Path 8 | from typing import TYPE_CHECKING 9 | 10 | import sqlparse 11 | 
from django import db 12 | from django.db.backends import utils as _django_utils 13 | from django.db.models.sql import compiler as _compiler 14 | from django.db.models.sql import query as _query 15 | from graphene_django import views as _views 16 | from graphene_django.debug.sql import tracking as _tracking 17 | 18 | from example_project.config import logging as _logging 19 | 20 | if TYPE_CHECKING: 21 | from query_optimizer.typing import Any, Callable, Generator 22 | 23 | # Paths for stack trace filtering. 24 | BASE_PATH = str(Path(__file__).parent.parent.parent.resolve()) 25 | SKIP_PATHS = [ 26 | str(Path(_query.__file__).resolve()), 27 | str(Path(_compiler.__file__).resolve()), 28 | str(Path(_tracking.__file__).resolve()), 29 | str(Path(_django_utils.__file__).resolve()), 30 | str(Path(_logging.__file__).resolve()), 31 | ] 32 | STOP_PATH = str(Path(_views.__file__).resolve()) 33 | 34 | 35 | @dataclass 36 | class QueryData: 37 | queries: list[str] 38 | stacks: list[str] 39 | 40 | def __str__(self) -> str: 41 | return f"QueryData with {len(self.queries)} queries." 
42 | 43 | def __repr__(self) -> str: 44 | return "QueryData(queries=..., stacks=...)" 45 | 46 | @property 47 | def count(self) -> int: 48 | return len(self.queries) 49 | 50 | @property 51 | def log(self) -> str: 52 | message = "\n" + "-" * 75 53 | message += f"\n>>> Queries ({len(self.queries)}):\n\n" 54 | 55 | query: str 56 | summary: str 57 | for index, (query, summary) in enumerate(zip(self.queries, self.stacks, strict=False)): 58 | message += f"{index + 1})" 59 | message += "\n\n" 60 | message += "--- Query ".ljust(75, "-") 61 | message += "\n\n" 62 | message += sqlparse.format(query, reindent=True) 63 | message += "\n\n" 64 | message += "--- Stack (abridged) ".ljust(75, "-") 65 | message += "\n\n" 66 | message += summary 67 | message += "\n" 68 | message += "-" * 75 69 | message += "\n\n" 70 | 71 | message += "-" * 75 72 | return message 73 | 74 | def __getitem__(self, item: int) -> str: 75 | return self.queries[item] 76 | 77 | 78 | def db_query_logger( 79 | execute: Callable[..., Any], 80 | sql: str, 81 | params: tuple[Any, ...], 82 | many: bool, # noqa: FBT001 83 | context: dict[str, Any], 84 | # Added with functools.partial() 85 | query_data: QueryData, 86 | ) -> Any: 87 | """ 88 | A database query logger for capturing executed database queries. 89 | Used to check that query optimizations work as expected. 90 | 91 | Can also be used as a place to put debugger breakpoint for solving issues. 92 | """ 93 | query_data.stacks.append(get_stack_info()) 94 | 95 | # Don't include transaction creation, as we aren't interested in them. 96 | if not sql.startswith("SAVEPOINT") and not sql.startswith("RELEASE SAVEPOINT"): 97 | try: 98 | query_data.queries.append(sql % params) 99 | except TypeError: 100 | query_data.queries.append(sql) 101 | return execute(sql, params, many, context) 102 | 103 | 104 | def get_stack_info() -> str: 105 | # Get the current stack for debugging purposes. 106 | # Don't include files from the skipped paths. 
107 | stack: list[traceback.FrameSummary] = [] 108 | skipped = 0 # How many frames have been skipped? 109 | to_skip = 2 # Skip the first two frames (this func and caller func) 110 | 111 | for frame in reversed(traceback.extract_stack()): 112 | if skipped < to_skip: 113 | skipped += 1 114 | continue 115 | 116 | is_skipped_path = any(frame.filename.startswith(path) for path in SKIP_PATHS) 117 | if is_skipped_path: 118 | continue 119 | 120 | is_stop_path = frame.filename.startswith(STOP_PATH) 121 | if is_stop_path: 122 | break 123 | 124 | stack.insert(0, frame) 125 | 126 | is_own_file = frame.filename.startswith(BASE_PATH) 127 | if is_own_file: 128 | break 129 | 130 | return "".join(traceback.StackSummary.from_list(stack).format()) 131 | 132 | 133 | @contextmanager 134 | def capture_database_queries() -> Generator[QueryData, None, None]: 135 | """Capture results of what database queries were executed. `DEBUG` needs to be set to True.""" 136 | query_data = QueryData(queries=[], stacks=[]) 137 | query_logger = partial(db_query_logger, query_data=query_data) 138 | 139 | with db.connection.execute_wrapper(query_logger): 140 | yield query_data 141 | -------------------------------------------------------------------------------- /example_project/config/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MrThearMan/graphene-django-query-optimizer/f8291cd21c215f7a84ad79836c6e40a651d15ee2/example_project/config/__init__.py -------------------------------------------------------------------------------- /example_project/config/logging.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import traceback 3 | from collections.abc import Callable 4 | from pathlib import Path 5 | from typing import Any 6 | 7 | logger = logging.getLogger(__name__) 8 | 9 | BASE_PATH = str(Path(__file__).resolve().parent.parent) 10 | 11 | 12 | class 
DotPathFormatter(logging.Formatter): 13 | def format(self, record: logging.LogRecord) -> str: 14 | record.module = self.get_dotpath(record) 15 | return super().format(record) 16 | 17 | def get_dotpath(self, record: logging.LogRecord) -> str: 18 | """Try to fetch the full dot import path for the module the log happened at.""" 19 | # For library logs 20 | split_path = record.pathname.split("site-packages") 21 | if len(split_path) > 1: 22 | return self.format_dotpath(split_path[-1][1:]) 23 | 24 | # For our logs 25 | split_path = record.pathname.split(str(BASE_PATH)) 26 | if len(split_path) > 1: 27 | return self.format_dotpath(split_path[-1][1:]) 28 | 29 | # Fall back to the module name, which doesn't include the full path info 30 | return record.module 31 | 32 | @staticmethod 33 | def format_dotpath(path: str) -> str: 34 | return path.removesuffix(".py").replace("/", ".").replace("\\", ".") 35 | 36 | 37 | class TracebackMiddleware: 38 | def resolve(self, next_func: Callable, root: Any, info: Any, **kwargs: Any) -> Any: 39 | try: 40 | return next_func(root, info, **kwargs) 41 | except Exception as err: # noqa: BLE001 42 | logger.info(traceback.format_exc()) 43 | return err 44 | -------------------------------------------------------------------------------- /example_project/config/settings.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from django.core.management.utils import get_random_secret_key 4 | 5 | BASE_DIR = Path(__file__).resolve().parent.parent 6 | 7 | # --- First Party ----------------------------------------------- 8 | 9 | DEBUG = True 10 | SECRET_KEY = get_random_secret_key() 11 | ROOT_URLCONF = "example_project.config.urls" 12 | WSGI_APPLICATION = "example_project.config.wsgi.application" 13 | DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" 14 | 15 | ALLOWED_HOSTS = [] 16 | 17 | INTERNAL_IPS = [ 18 | "localhost", 19 | "127.0.0.1", 20 | ] 21 | 22 | INSTALLED_APPS = [ 23 | 
"django.contrib.admin", 24 | "django.contrib.auth", 25 | "django.contrib.contenttypes", 26 | "django.contrib.sessions", 27 | "django.contrib.messages", 28 | "django.contrib.staticfiles", 29 | # "debug_toolbar", 30 | "graphiql_debug_toolbar", 31 | "graphene_django", 32 | "example_project.app", 33 | ] 34 | 35 | MIDDLEWARE = [ 36 | # TODO: Broken, see https://github.com/flavors/django-graphiql-debug-toolbar/pull/27 37 | # "graphiql_debug_toolbar.middleware.DebugToolbarMiddleware", 38 | "django.middleware.security.SecurityMiddleware", 39 | "django.contrib.sessions.middleware.SessionMiddleware", 40 | "django.middleware.common.CommonMiddleware", 41 | "django.middleware.csrf.CsrfViewMiddleware", 42 | "django.contrib.auth.middleware.AuthenticationMiddleware", 43 | "django.contrib.messages.middleware.MessageMiddleware", 44 | "django.middleware.clickjacking.XFrameOptionsMiddleware", 45 | ] 46 | 47 | TEMPLATES = [ 48 | { 49 | "BACKEND": "django.template.backends.django.DjangoTemplates", 50 | "DIRS": [], 51 | "APP_DIRS": True, 52 | "OPTIONS": { 53 | "context_processors": [ 54 | "django.template.context_processors.debug", 55 | "django.template.context_processors.request", 56 | "django.contrib.auth.context_processors.auth", 57 | "django.contrib.messages.context_processors.messages", 58 | ], 59 | }, 60 | }, 61 | ] 62 | 63 | DATABASES = { 64 | "default": { 65 | "ENGINE": "django.db.backends.sqlite3", 66 | "NAME": BASE_DIR / "config" / "testdb", 67 | }, 68 | } 69 | 70 | CACHES = { 71 | "default": { 72 | "BACKEND": "django.core.cache.backends.locmem.LocMemCache", 73 | }, 74 | } 75 | 76 | AUTH_PASSWORD_VALIDATORS = [ 77 | {"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"}, 78 | {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, 79 | {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, 80 | {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, 81 | ] 82 | 83 | LOGGING = { 84 | 
"version": 1, 85 | "disable_existing_loggers": False, 86 | "filters": {}, 87 | "formatters": { 88 | "common": { 89 | "()": "example_project.config.logging.DotPathFormatter", 90 | "format": "{asctime} | {levelname} | {module}.{funcName}:{lineno} | {message}", 91 | "datefmt": "%Y-%m-%dT%H:%M:%S%z", 92 | "style": "{", 93 | }, 94 | }, 95 | "handlers": { 96 | "stdout": { 97 | "class": "logging.StreamHandler", 98 | "formatter": "common", 99 | }, 100 | }, 101 | "root": { 102 | "level": "INFO", 103 | "handlers": ["stdout"], 104 | }, 105 | } 106 | 107 | LANGUAGE_CODE = "en-us" 108 | LANGUAGES = [("en", "English")] 109 | TIME_ZONE = "UTC" 110 | USE_I18N = True 111 | USE_TZ = True 112 | STATIC_URL = "/static/" 113 | 114 | # --- Third Party ----------------------------------------------- 115 | 116 | GRAPHENE = { 117 | "SCHEMA": "example_project.app.schema.schema", 118 | "TESTING_ENDPOINT": "/graphql/", 119 | "MIDDLEWARE": [ 120 | "example_project.config.logging.TracebackMiddleware", 121 | # "graphene_django.debug.DjangoDebugMiddleware", 122 | ], 123 | } 124 | 125 | GRAPHQL_QUERY_OPTIMIZER = { 126 | "MAX_COMPLEXITY": 10, 127 | } 128 | -------------------------------------------------------------------------------- /example_project/config/urls.py: -------------------------------------------------------------------------------- 1 | from django.conf import settings 2 | from django.contrib import admin 3 | from django.urls import include, path 4 | from graphene_django.views import GraphQLView 5 | 6 | urlpatterns = [ 7 | path("graphql/", GraphQLView.as_view(graphiql=True)), 8 | path("admin/", admin.site.urls), 9 | ] 10 | 11 | if "debug_toolbar" in settings.INSTALLED_APPS: 12 | urlpatterns.append(path("__debug__/", include("debug_toolbar.urls"))) 13 | -------------------------------------------------------------------------------- /example_project/config/wsgi.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from 
django.core.wsgi import get_wsgi_application 4 | 5 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_project.config.settings") 6 | 7 | application = get_wsgi_application() 8 | -------------------------------------------------------------------------------- /manage.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | 5 | def main() -> None: 6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_project.config.settings") 7 | from django.core.management import execute_from_command_line 8 | 9 | execute_from_command_line(sys.argv) 10 | 11 | 12 | if __name__ == "__main__": 13 | main() 14 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Graphene Django Query Optimizer 2 | 3 | nav: 4 | - Home: index.md 5 | - Quickstart: quickstart.md 6 | - Relay: relay.md 7 | - Filtering: filtering.md 8 | - Depth Limiting: depth.md 9 | - Fragments: fragments.md 10 | - Custom Fields: custom.md 11 | - Settings: settings.md 12 | - Technical Details: technical.md 13 | 14 | theme: 15 | name: readthedocs 16 | highlightjs: true 17 | hljs_languages: # https://github.com/highlightjs/highlight.js/blob/main/SUPPORTED_LANGUAGES.md 18 | - python 19 | - console 20 | - json 21 | 22 | markdown_extensions: 23 | pymdownx.highlight: 24 | anchor_linenums: true 25 | pymdownx.inlinehilite: 26 | pymdownx.snippets: 27 | pymdownx.superfences: 28 | custom_fences: 29 | - name: mermaid 30 | class: mermaid 31 | format: !!python/name:mermaid2.fence_mermaid 32 | 33 | plugins: 34 | - search: 35 | - mermaid2: 36 | version: 10.3.1 37 | 38 | extra_css: 39 | - css/docs.css 40 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = 
"graphene-django-query-optimizer" 3 | version = "0.10.11" 4 | description = "Automatically optimize SQL queries in Graphene-Django schemas." 5 | authors = [ 6 | { name = "Matti Lamppu", email = "lamppu.matti.akseli@gmail.com" }, 7 | ] 8 | license = "MIT" 9 | readme = "README.md" 10 | keywords = [ 11 | "django", 12 | "graphene", 13 | "sql", 14 | "graphql", 15 | "python", 16 | "query", 17 | "optimizer", 18 | "optimization", 19 | ] 20 | classifiers = [ 21 | "Environment :: Web Environment", 22 | "Operating System :: OS Independent", 23 | "Intended Audience :: Developers", 24 | "Natural Language :: English", 25 | "Programming Language :: Python", 26 | "Programming Language :: Python :: 3", 27 | "Programming Language :: Python :: 3 :: Only", 28 | "Programming Language :: Python :: 3.10", 29 | "Programming Language :: Python :: 3.11", 30 | "Programming Language :: Python :: 3.12", 31 | "Programming Language :: Python :: 3.13", 32 | "Framework :: Django :: 4.2", 33 | "Framework :: Django :: 5.0", 34 | "Framework :: Django :: 5.1", 35 | ] 36 | requires-python = ">=3.10,<4" 37 | dynamic = [ 38 | "dependencies", 39 | ] 40 | 41 | [project.urls] 42 | "Homepage" = "https://mrthearman.github.io/graphene-django-query-optimizer" 43 | "Repository" = "https://github.com/MrThearMan/graphene-django-query-optimizer" 44 | "Bug Tracker" = "https://github.com/MrThearMan/graphene-django-query-optimizer/issues" 45 | "Documentation" = "https://mrthearman.github.io/graphene-django-query-optimizer" 46 | 47 | [tool.poetry] 48 | requires-poetry = ">=2.0.0" 49 | packages = [ 50 | { include = "query_optimizer" }, 51 | ] 52 | 53 | [tool.poetry.dependencies] 54 | Django = ">=4.2" 55 | graphene-django = ">=3.0.0" 56 | django-settings-holder = ">=0.1.2" 57 | django-filter = { version = ">=21.1", optional = true } 58 | 59 | [tool.poetry.group.test.dependencies] 60 | pytest = "8.3.5" 61 | coverage = "7.8.2" 62 | pytest-django = "4.11.1" 63 | tox = "4.26.0" 64 | tox-gh-actions = "3.3.0" 65 | faker = 
"37.3.0" 66 | factory-boy = "3.3.3" 67 | sqlparse = "0.5.3" 68 | django-graphiql-debug-toolbar = "0.2.0" 69 | py-spy = "0.4.0" 70 | django-filter = "25.1" 71 | 72 | [tool.poetry.group.docs.dependencies] 73 | mkdocs = "1.6.1" 74 | pymdown-extensions = "10.15" 75 | mkdocs-mermaid2-plugin = "1.2.1" 76 | 77 | [tool.poetry.group.lint.dependencies] 78 | pre-commit = "4.2.0" 79 | 80 | [tool.ruff] 81 | fix = true 82 | unsafe-fixes = true 83 | line-length = 120 84 | extend-exclude = [ 85 | "tests*", 86 | ] 87 | lint.typing-modules = [ 88 | "query_optimizer.typing", 89 | ] 90 | lint.explicit-preview-rules = true 91 | lint.preview = true 92 | lint.select = [ 93 | "A", # flake8-builtins 94 | "ANN", # flake8-annotations 95 | "ARG", # flake8-unused-arguments 96 | "B", # flake8-bugbear 97 | "BLE", # flake8-blind-except 98 | "C90", # mccabe 99 | "C4", # flake8-comprehensions 100 | "COM", # flake8-commas 101 | "D200", # pydocstyle: One-line docstring should fit on one line 102 | "D201", # pydocstyle: No blank lines allowed before function docstring (found {num_lines}) 103 | "D202", # pydocstyle: No blank lines allowed after function docstring (found {num_lines}) 104 | "D209", # pydocstyle: Multi-line docstring closing quotes should be on a separate line 105 | "D213", # pydocstyle: Multi-line docstring summary should start at the second line 106 | "DJ", # flake8-django 107 | "DTZ", # flake8-datetimez 108 | "E", # pycodestyle errors 109 | "EM", # flake8-errmsg 110 | "F", # pyflakes 111 | "FA", # flake8-future-annotations 112 | "FBT", # flake8-boolean-trap 113 | "FLY", # flynt 114 | "G", # flake8-logging-format 115 | "I", # isort 116 | "ICN", # flake8-import-conventions 117 | "INP", # flake8-no-pep420 118 | "INT", # flake8-gettext 119 | "ISC", # flake8-implicit-str-concat 120 | "LOG", # flake8-logging 121 | "N", # pep8-naming 122 | "PERF", # perflint 123 | "PGH", # pygrep-hooks 124 | "PIE", # flake8-pie 125 | "PL", # pylint 126 | "PT", # flake8-pytest-style 127 | "PTH", # 
flake8-use-pathlib
    "PYI", # flake8-pyi
    "Q", # flake8-quotes
    "R", # Refactor
    "RET", # flake8-return
    "RSE", # flake8-raise
    "RUF", # ruff-specific rules
    "S", # flake8-bandit
    "SIM", # flake8-simplify
    "SLF", # flake8-self
    "T20", # flake8-print
    "TCH", # flake8-type-checking
    "TID", # flake8-tidy-imports
    "TRY", # tryceratops
    "UP", # pyupgrade
    "W", # pycodestyle warnings
]
# Preview rules
lint.extend-select = [
    "B909", # loop-iterator-mutation
    "FURB110", # if-exp-instead-of-or-operator
    "FURB142", # for-loop-set-mutations
    "FURB145", # slice-copy
    "FURB171", # single-item-membership-test
    "FURB187", # list-reverse-copy
    "PLE0307", # invalid-str-return-type
    "PLR0916", # too-many-boolean-expressions
    "PLR1730", # if-stmt-min-max
    "PLR1733", # unnecessary-dict-index-lookup
    "PLR1736", # unnecessary-list-index-lookup
    "PLR6104", # non-augmented-assignment
    "PLW0211", # bad-staticmethod-argument
    "PLW0642", # self-or-cls-assignment
    "RUF021", # parenthesize-chained-operators
    "RUF022", # unsorted-dunder-all
    "UP042", # replace-str-enum
]
lint.ignore = [
    "A005", # Allow shadowing builtin modules
    "ANN401", # Any-typing allowed
    "ARG002", # Unused method argument
    "ARG003", # Unused class method argument
    "N805", # First argument of a method should be named `self`
    "SLF001", # Accessing private members is allowed
    "UP007", # Use `X | Y` for union type annotations
    "G004", # Logging statement uses f-string
    "S602", # Broken: https://github.com/astral-sh/ruff/issues/4045
    "S603", # Broken: https://github.com/astral-sh/ruff/issues/4045
    "RUF012", # Mutable class attributes
    "DJ001", # Avoid using `null=True` on string-based fields
    #
    # Conflicting with ruff-format
    #
    "COM812", #
missing-trailing-comma 181 | "COM819", # prohibited-trailing-comma 182 | "D206", # indent-with-spaces 183 | "D300", # triple-single-quotes 184 | "E111", # indentation-with-invalid-multiple 185 | "E114", # indentation-with-invalid-multiple-comment 186 | "E117", # over-indented 187 | "ISC001", # single-line-implicit-string-concatenation 188 | "ISC002", # multi-line-implicit-string-concatenation 189 | "Q000", # bad-quotes-inline-string 190 | "Q001", # bad-quotes-multiline-string 191 | "Q002", # bad-quotes-docstring 192 | "Q003", # avoidable-escaped-quote 193 | "W191", # tab-indentation 194 | ] 195 | 196 | [tool.ruff.lint.extend-per-file-ignores] 197 | "test_*.py" = [ 198 | "ANN", # No need to annotate tests 199 | "N801", # Allow whatever class names in tests 200 | "N802", # Allow whatever function names in tests 201 | "N815", # Allow whatever variable names in tests 202 | "PLR0915", # Can have as many statements as needed 203 | "PLR2004", # Magic value comparisons are allowed in tests 204 | "S101", # Assert is fine 205 | "S105", # Hardcoded passwords are fine in tests 206 | "S106", # Hardcoded passwords are fine in tests 207 | "S108", # No need to care about insecure temp file usages in tests 208 | "S311", # Pseudo-random generators are fine here 209 | "SLF", # Allow accessing private members in tests 210 | "UP", # No upgrade rules 211 | ] 212 | "example_project*" = [ 213 | "ANN", # No need to annotate tests 214 | "ARG", # Fixtures can be unused 215 | "DJ", # Ignore Django-specific rules 216 | ] 217 | "conftest.py" = [ 218 | "ARG", # Fixtures can be unused 219 | "ANN", # No need to annotate tests 220 | "UP", # No upgrade rules 221 | ] 222 | 223 | [tool.ruff.lint.isort] 224 | combine-as-imports = false 225 | split-on-trailing-comma = false 226 | known-third-party = [ 227 | "django", 228 | ] 229 | known-first-party = [ 230 | "query_optimizer", 231 | ] 232 | 233 | [tool.ruff.lint.flake8-import-conventions] 234 | banned-from = [ 235 | "base64", 236 | "csv", 237 | 
"datetime", 238 | "hashlib", 239 | "hmac", 240 | "json", 241 | "logging", 242 | "math", 243 | "os", 244 | "pickle", 245 | "random", 246 | "re", 247 | "shutil", 248 | "subprocess", 249 | "sys", 250 | "tempfile", 251 | "time", 252 | "uuid", 253 | "xml", 254 | "yaml", 255 | ] 256 | 257 | [tool.ruff.lint.flake8-self] 258 | extend-ignore-names = [ 259 | "_base_manager", 260 | "_default_manager", 261 | "_meta", 262 | ] 263 | 264 | [tool.ruff.lint.pylint] 265 | max-args = 7 266 | 267 | [tool.coverage.run] 268 | relative_files = true 269 | 270 | [tool.coverage.report] 271 | omit = [ 272 | "tests/*", 273 | "docs/*", 274 | ".venv/*", 275 | ".tox/*", 276 | "example_project/*", 277 | ] 278 | exclude_lines = [ 279 | "if TYPE_CHECKING:", 280 | "except ImportError:", 281 | "pragma: no cover", 282 | "pass", 283 | ] 284 | 285 | [tool.pytest.ini_options] 286 | DJANGO_SETTINGS_MODULE = "example_project.config.settings" 287 | filterwarnings = [ 288 | "ignore::DeprecationWarning", 289 | ] 290 | 291 | [tool.tox] 292 | legacy_tox_ini = """ 293 | [tox] 294 | envlist = py{310, 311, 312, 313}-django{42, 50, 51}-gd{30, 31, 32} 295 | isolated_build = true 296 | 297 | [gh-actions] 298 | python = 299 | 3.10: py310 300 | 3.11: py311 301 | 3.12: py312 302 | 3.13: py313 303 | 304 | [testenv] 305 | allowlist_externals = 306 | poetry 307 | setenv = 308 | PYTHONPATH = {toxinidir} 309 | DJANGO_SETTINGS_MODULE = example_project.config.settings 310 | deps = 311 | django42: Django >=4.2,<4.3 312 | django50: Django >=5.0,<5.1 313 | django51: Django >=5.1,<5.2 314 | 315 | gd30: graphene-django >=3.0.0,<3.1.0 316 | gd31: graphene-django >=3.1.0,<3.2.0 317 | gd32: graphene-django >=3.2.0,<3.3.0 318 | 319 | django-filter 320 | django-settings-holder 321 | typing-extensions 322 | 323 | pytest 324 | coverage 325 | pytest-django 326 | faker 327 | factory-boy 328 | sqlparse 329 | django-graphiql-debug-toolbar 330 | commands = 331 | coverage run -m pytest --disable-warnings 332 | """ 333 | 334 | [build-system] 335 
| requires = ["poetry-core>=1.9.0"] 336 | build-backend = "poetry.core.masonry.api" 337 | -------------------------------------------------------------------------------- /query_optimizer/__init__.py: -------------------------------------------------------------------------------- 1 | # Import all converters at the top to make sure they are registered first 2 | from .converters import * # noqa: F403, I001 3 | 4 | from .compiler import optimize, optimize_single 5 | from .fields import ( 6 | DjangoConnectionField, 7 | DjangoListField, 8 | RelatedField, 9 | AnnotatedField, 10 | MultiField, 11 | ManuallyOptimizedField, 12 | ) 13 | from .types import DjangoObjectType 14 | 15 | __all__ = [ 16 | "AnnotatedField", 17 | "DjangoConnectionField", 18 | "DjangoListField", 19 | "DjangoObjectType", 20 | "ManuallyOptimizedField", 21 | "MultiField", 22 | "RelatedField", 23 | "optimize", 24 | "optimize_single", 25 | ] 26 | -------------------------------------------------------------------------------- /query_optimizer/compiler.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import contextlib 4 | from typing import TYPE_CHECKING 5 | 6 | from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation 7 | from django.db.models import ForeignKey, Manager, ManyToOneRel, Model, QuerySet 8 | from graphene.utils.str_converters import to_snake_case 9 | from graphene_django.utils import maybe_queryset 10 | 11 | from .ast import GraphQLASTWalker 12 | from .errors import OptimizerError 13 | from .optimizer import QueryOptimizer 14 | from .prefetch_hack import fetch_in_context 15 | from .settings import optimizer_settings 16 | from .utils import is_optimized, optimizer_logger, swappable_by_subclassing 17 | 18 | if TYPE_CHECKING: 19 | import graphene 20 | from django.db import models 21 | from graphene.types.definitions import GrapheneObjectType 22 | from graphql import FieldNode 23 | 24 | from 
def optimize(
    queryset: QuerySet[TModel],
    info: GQLInfo,
    *,
    max_complexity: Optional[int] = None,
) -> QuerySet[TModel]:
    """
    Optimize the given queryset according to the field selections received in the GraphQLResolveInfo.

    :param queryset: The base queryset to optimize.
    :param info: The GraphQLResolveInfo containing the query AST.
    :param max_complexity: How many 'select_related' and 'prefetch_related' table joins are allowed.
        Falls back to the `MAX_COMPLEXITY` setting when not given.
    :return: The optimized queryset. If optimizations were applied, the queryset
        has already been evaluated inside the prefetch-hack context.
    """
    optimizer = OptimizationCompiler(info, max_complexity=max_complexity).compile(queryset)
    # `compile` returns None if the queryset was already optimized by an earlier pass.
    if optimizer is not None:
        queryset = optimizer.optimize_queryset(queryset)
        # Evaluate here so that the prefetch hack (see `prefetch_hack.py`) is active
        # while the database query actually runs.
        fetch_in_context(queryset, info)

    return queryset


def optimize_single(
    queryset: QuerySet[TModel],
    info: GQLInfo,
    *,
    pk: PK,
    max_complexity: Optional[int] = None,
) -> Optional[TModel]:
    """
    Optimize the given queryset for a single model instance by its primary key.

    :param queryset: The base queryset to optimize.
    :param info: The GraphQLResolveInfo containing the query AST.
    :param pk: Primary key of the instance to fetch.
    :param max_complexity: How many 'select_related' and 'prefetch_related' table joins are allowed.
    :return: The model instance, or None if it was not found (or was filtered out).
    """
    optimizer = OptimizationCompiler(info, max_complexity=max_complexity).compile(queryset)
    if optimizer is None:  # pragma: no cover
        return queryset.filter(pk=pk).first()

    queryset = optimizer.optimize_queryset(queryset.filter(pk=pk))
    fetch_in_context(queryset, info)

    # Shouldn't use .first(), as it can apply additional ordering, which would cancel the optimization.
    # The queryset should have the right model instance, since we started by filtering by its pk,
    # so we can just pick that out of the result cache (if it hasn't been filtered out).
    return next(iter(queryset), None)


@swappable_by_subclassing
class OptimizationCompiler(GraphQLASTWalker):
    """Class for compiling SQL optimizations based on the given query."""

    def __init__(self, info: GQLInfo, max_complexity: Optional[int] = None) -> None:
        """
        Initialize the optimization compiler with the query info.

        :param info: The GraphQLResolveInfo containing the query AST.
        :param max_complexity: How many 'select_related' and 'prefetch_related' table joins are allowed.
            Used to protect from malicious queries.
        """
        self.max_complexity = max_complexity or optimizer_settings.MAX_COMPLEXITY
        # The optimizer currently being built; reassigned in `compile` and temporarily
        # swapped to child optimizers via `use_optimizer` while walking relations.
        self.optimizer: QueryOptimizer = None  # type: ignore[assignment]
        # Set by `handle_custom_field` when a field aliases a model field via 'field_name';
        # consumed (and reset) by `handle_to_many_field` to key the prefetch correctly.
        self.to_attr: Optional[str] = None
        super().__init__(info)

    def compile(self, queryset: Union[QuerySet, Manager, list[Model]]) -> Optional[QueryOptimizer]:
        """
        Compile optimizations for the given queryset.

        :return: QueryOptimizer instance that can perform any needed optimization,
                 or None if queryset is already optimized.
        :raises OptimizerError: Something went wrong during the optimization process.
        """
        queryset = maybe_queryset(queryset)
        # If prior optimization has been done already, return early.
        if is_optimized(queryset):
            return None

        # Setup initial state.
        self.model = queryset.model
        self.optimizer = QueryOptimizer(model=queryset.model, info=self.info)

        # Walk the query AST to compile the optimizations.
        try:
            self.run()

        # Allow known errors to be raised.
        except OptimizerError:  # pragma: no cover
            raise

        # Raise unknown errors if not allowed to skip optimization on error.
        except Exception as error:  # pragma: no cover
            optimizer_logger.warning("Something went wrong during the optimization process.", exc_info=error)
            if not optimizer_settings.SKIP_OPTIMIZATION_ON_ERROR:
                raise
            return None

        return self.optimizer

    def increase_complexity(self) -> None:
        # Guard against overly complex (possibly malicious) queries.
        super().increase_complexity()
        if self.complexity > self.max_complexity:
            msg = f"Query complexity exceeds the maximum allowed of {self.max_complexity}"
            raise OptimizerError(msg)

    def handle_normal_field(self, field_type: GrapheneObjectType, field_node: FieldNode, field: models.Field) -> None:
        # Plain model fields only need to be included in `.only()`.
        self.optimizer.only_fields.append(field.get_attname())

    def handle_to_one_field(
        self,
        field_type: GrapheneObjectType,
        field_node: FieldNode,
        related_field: ToOneField,
        related_model: type[Model] | None,
    ) -> None:
        """Register a select_related (or prefetch for generic FKs) for a to-one relation."""
        name = self.get_related_field_name(related_field)
        optimizer = QueryOptimizer(model=related_model, info=self.info, name=name, parent=self.optimizer)

        # `setdefault` reuses an existing child optimizer if the same relation
        # was already selected elsewhere in the query.
        if isinstance(related_field, GenericForeignKey):
            # Generic foreign keys cannot be joined, so they must be prefetched.
            optimizer = self.optimizer.prefetch_related.setdefault(name, optimizer)
        else:
            optimizer = self.optimizer.select_related.setdefault(name, optimizer)

        # Keep the FK columns needed to follow the relation in `.only()`.
        if isinstance(related_field, ForeignKey):
            self.optimizer.related_fields.append(related_field.attname)

        if isinstance(related_field, GenericForeignKey):
            self.optimizer.related_fields.append(related_field.ct_field)
            self.optimizer.related_fields.append(related_field.fk_field)

        # Walk the relation's sub-selections with the child optimizer active.
        with self.use_optimizer(optimizer):
            super().handle_to_one_field(field_type, field_node, related_field, related_model)

    def handle_to_many_field(
        self,
        field_type: GrapheneObjectType,
        field_node: FieldNode,
        related_field: ToManyField,
        related_model: type[Model] | None,
    ) -> None:
        """Register a prefetch_related for a to-many relation."""
        name = self.get_related_field_name(related_field)
        alias = getattr(field_node.alias, "value", None)
        # Prefetch key precedence: explicit 'field_name' redirection > GraphQL alias > field name.
        key = self.to_attr if self.to_attr is not None else alias if alias is not None else name
        self.to_attr = None

        optimizer = QueryOptimizer(model=related_model, info=self.info, name=name, parent=self.optimizer)
        optimizer = self.optimizer.prefetch_related.setdefault(key, optimizer)

        # The reverse relation needs its FK column to link prefetched rows back to parents.
        if isinstance(related_field, ManyToOneRel):
            optimizer.related_fields.append(related_field.field.attname)

        if isinstance(related_field, GenericRelation):
            optimizer.related_fields.append(related_field.object_id_field_name)
            optimizer.related_fields.append(related_field.content_type_field_name)

        with self.use_optimizer(optimizer):
            super().handle_to_many_field(field_type, field_node, related_field, related_model)

    def handle_total_count(self, field_type: GrapheneObjectType, field_node: FieldNode) -> None:
        # Connection 'totalCount' selection: flag the optimizer to annotate the count.
        self.optimizer.total_count = True

    def handle_custom_field(self, field_type: GrapheneObjectType, field_node: FieldNode) -> None:
        """Handle fields declared on the ObjectType that are not plain model fields."""
        field_name = to_snake_case(field_node.name.value)
        field: Optional[graphene.Field] = field_type.graphene_type._meta.fields.get(field_name)
        if field is None:  # pragma: no cover
            msg = (
                f"Field '{field_node.name.value}' not found from object type '{field_type.graphene_type}'. "
                f"Cannot optimize custom field."
            )
            optimizer_logger.warning(msg)
            return None

        # `RelatedField`, `DjangoListField` and `DjangoConnectionField` can define a
        # 'field_name' attribute to specify the actual model field name.
        actual_field_name: Optional[str] = getattr(field, "field_name", None)
        if actual_field_name is not None:
            # Remember the GraphQL name so `handle_to_many_field` can key the prefetch by it.
            self.to_attr = field_name
            return self.handle_model_field(field_type, field_node, actual_field_name)

        # Fields (e.g. AnnotatedField, MultiField) can provide their own optimization hook.
        if hasattr(field, "optimizer_hook") and callable(field.optimizer_hook):
            field.optimizer_hook(self)
            return None

        return None  # pragma: no cover

    @contextlib.contextmanager
    def use_optimizer(self, optimizer: QueryOptimizer) -> None:
        """Temporarily make the given child optimizer the current one; always restore the parent."""
        orig_optimizer = self.optimizer
        try:
            self.optimizer = optimizer
            yield
        finally:
            self.optimizer = orig_optimizer
@convert_django_field.register(models.ManyToManyField)
@convert_django_field.register(models.ManyToManyRel)
@convert_django_field.register(models.ManyToOneRel)
def convert_to_many_field(
    field,  # noqa: ANN001
    registry: Optional[Registry] = None,
) -> graphene.Dynamic:
    """
    Convert a to-many model field (or reverse relation) into a Dynamic graphene field.

    The concrete field class is resolved lazily: a DjangoConnectionField when the
    related ObjectType has a connection and the setting allows it, otherwise a
    DjangoListField. Returns None from the dynamic resolver if no ObjectType has
    been registered for the related model.
    """

    def dynamic_type() -> Union[DjangoConnectionField, DjangoListField, None]:
        object_type: Optional[type[DjangoObjectType]] = registry.get_type_for_model(field.related_model)
        if object_type is None:  # pragma: no cover
            return None

        # Reverse relations carry the forward field on `.field`; use it for the description.
        forward_field = field if isinstance(field, models.ManyToManyField) else field.field
        field_description: str = get_django_field_description(forward_field)
        is_required: bool = True  # will always return a queryset, even if empty

        from query_optimizer.fields import DjangoConnectionField, DjangoListField

        use_connection = (
            object_type._meta.connection
            and optimizer_settings.ALLOW_CONNECTION_AS_DEFAULT_NESTED_TO_MANY_FIELD
        )
        if use_connection:  # pragma: no cover
            return DjangoConnectionField(
                object_type,
                required=is_required,
                description=field_description,
            )
        return DjangoListField(
            object_type,
            required=is_required,
            description=field_description,
        )

    return graphene.Dynamic(dynamic_type)
@cache
def default_filterset_class() -> type[FilterSet]:
    """
    Return the base FilterSet class to use for generated filtersets.

    Uses the dotted path from the `DEFAULT_FILTERSET_CLASS` setting when set,
    otherwise falls back to django-filter's plain `FilterSet`. Cached, since the
    result cannot change within a process.
    """
    dotted_path: str = optimizer_settings.DEFAULT_FILTERSET_CLASS
    if not dotted_path:
        return FilterSet
    return import_string(dotted_path)  # pragma: no cover


def create_filterset(
    model: type[models.Model],
    fields: dict[str, list[str]],
) -> type[FilterSet]:
    """
    Dynamically build a FilterSet subclass for the given model.

    :param model: Model the filterset is for.
    :param fields: Mapping of model field names to the lookups to expose for each.
    :return: The new FilterSet subclass, with CSV filters replaced for graphene compatibility.
    """
    class_name = f"{model._meta.object_name}FilterSet"
    meta_class = type("Meta", (), {"model": model, "fields": fields})
    new_filterset: type[FilterSet] = type(  # type: ignore[attr-defined]
        class_name,
        (default_filterset_class(),),
        {"Meta": meta_class},
    )
    # Swap `in`/`range` CSV filters for graphene-django compatible ones.
    replace_csv_filters(new_filterset)
    return new_filterset
def get_filter_info(info: GQLInfo, model: type[Model]) -> GraphQLFilterInfo:
    """
    Compile filter information included in the GraphQL query.

    :param info: The GraphQLResolveInfo containing the query AST.
    :param model: Model the root field corresponds to.
    :return: Filter info for the root field, or an empty dict if there is none.
    """
    compiler = FilterInfoCompiler(info, model)
    compiler.run()
    # Return the compiled filter info, or an empty dict if there is no filter info.
    # The root field may be aliased, in which case the alias is the key.
    name = getattr(info.field_nodes[0].alias, "value", None) or to_snake_case(info.field_name)
    return compiler.filter_info.get(name, {})


@swappable_by_subclassing
class FilterInfoCompiler(GraphQLASTWalker):
    """Class for compiling filtering information from a GraphQL query."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Filter info for the current level of the query; swapped out (and restored)
        # by `child_filter_info` while walking nested relations.
        self.filter_info: dict[str, GraphQLFilterInfo] = {}
        super().__init__(*args, **kwargs)

    def add_filter_info(self, parent_type: GrapheneObjectType, field_node: FieldNode) -> None:
        """
        Adds filter info for a field.
        Called for all field nodes that match a database relation.

        :param parent_type: Parent object type.
        :param field_node: FieldNode for the relation.
        """
        graphql_field = get_field_def(self.info.schema, parent_type, field_node)
        graphene_type = get_underlying_type(graphql_field.type)

        field_name = self.get_field_name(field_node)
        orig_field_name = to_snake_case(field_node.name.value)
        # Resolve the concrete argument values for this field (variables included).
        filters = get_argument_values(graphql_field, field_node, self.info.variable_values)

        is_node_ = is_node(graphql_field)
        is_connection_ = is_connection(graphene_type)

        # Find the field-specific limit, or use the default limit.
        max_limit: Optional[int] = getattr(
            getattr(parent_type.graphene_type, orig_field_name, None),
            "max_limit",
            graphene_settings.RELAY_CONNECTION_MAX_LIMIT,
        )

        self.filter_info[field_name] = GraphQLFilterInfo(
            name=graphene_type.name,
            # If the field is a relay node field, its `id` field should not be counted as a filter.
            filters={} if is_node_ else filters,
            children={},
            filterset_class=None,
            is_connection=is_connection_,
            is_node=is_node_,
            max_limit=max_limit,
        )

        # Attach the ObjectType's filterset class, if django-filter is available.
        if DJANGO_FILTER_INSTALLED and hasattr(graphene_type, "graphene_type"):
            object_type = graphene_type.graphene_type
            if is_connection_:
                # For connections, the filterset lives on the node type.
                object_type = object_type._meta.node

            self.filter_info[field_name]["filterset_class"] = getattr(object_type._meta, "filterset_class", None)

    def handle_selections(self, field_type: GrapheneType, selections: Selections) -> None:
        super().handle_selections(field_type, selections)
        # Remove filter info that do not have filters or children.
        # Preserve filter info for connections so that default nested limiting can be applied.
        for name in list(self.filter_info):
            info = self.filter_info[name]
            if not (info["filters"] or info["children"] or info["is_connection"]):
                del self.filter_info[name]

    def handle_query_class(self, field_type: GrapheneObjectType, field_node: FieldNode) -> None:
        self.add_filter_info(field_type, field_node)
        with self.child_filter_info(field_node):
            super().handle_query_class(field_type, field_node)

    def handle_custom_field(self, field_type: GrapheneObjectType, field_node: FieldNode) -> None:
        # Only graphene Fields declared on the ObjectType can carry filter arguments.
        field_name = to_snake_case(field_node.name.value)
        field = getattr(field_type.graphene_type, field_name, None)
        if isinstance(field, graphene.Field):
            self.add_filter_info(field_type, field_node)

    def handle_to_one_field(
        self,
        field_type: GrapheneObjectType,
        field_node: FieldNode,
        related_field: ToOneField,
        related_model: type[Model] | None,
    ) -> None:
        self.add_filter_info(field_type, field_node)
        with self.child_filter_info(field_node):
            return super().handle_to_one_field(field_type, field_node, related_field, related_model)

    def handle_to_many_field(
        self,
        field_type: GrapheneObjectType,
        field_node: FieldNode,
        related_field: ToManyField,
        related_model: type[Model] | None,
    ) -> None:
        self.add_filter_info(field_type, field_node)
        with self.child_filter_info(field_node):
            return super().handle_to_many_field(field_type, field_node, related_field, related_model)

    @contextlib.contextmanager
    def child_filter_info(self, field_node: FieldNode) -> None:
        """Collect nested filter info into a fresh dict, then attach it as the field's 'children'."""
        field_name = self.get_field_name(field_node)
        arguments: dict[str, GraphQLFilterInfo] = {}
        orig_arguments = self.filter_info
        try:
            self.filter_info = arguments
            yield
        finally:
            self.filter_info = orig_arguments
            # Only record children if the nested walk actually produced filter info.
            if arguments:
                self.filter_info[field_name]["children"] = arguments
def _prefetch_hack(
    queryset: models.QuerySet,
    field_name: str,
    instances: list[models.Model],
    *,
    cache: _PrefetchCacheType,
) -> models.QuerySet:
    """
    Patches the prefetch mechanism to not create duplicate joins in the SQL query.
    This is needed due to how filtering with many-to-many relations is implemented in Django,
    which creates new joins on consecutive filters for the same relation.
    See: https://docs.djangoproject.com/en/dev/topics/db/queries/#spanning-multi-valued-relationships

    For nested connection fields, a window function with a partition on the many-to-many field
    is used to limit the queryset. This adds an OUTER join for the through table of the many-to-many field.
    Then, when the prefetch queryset is filtered by `_filter_prefetch_queryset` to just the instances
    from the parent model, an INNER join is added to the through table. This creates unnecessary duplicates
    in the SQL query, which messes up the window function's partitioning. Therefore, this hack is needed
    to prevent the INNER join from being added.

    :param queryset: The prefetch queryset being filtered.
    :param field_name: Name of the relation being prefetched.
    :param instances: Parent model instances the prefetch is for.
    :param cache: Through-table registry built by `_register_for_prefetch_hack`.
    """
    #
    # `filter_is_sticky` is set here just to prevent the `used_aliases` from being cleared
    # when the queryset is cloned for filtering in `_filter_prefetch_queryset`.
    # See: `django.db.models.sql.query.Query.chain`.
    queryset.query.filter_is_sticky = True
    #
    # Add the registered through tables for a given model and field to the Query's `used_aliases`.
    # This is passed along during the filtering that happens as a part of `_filter_prefetch_queryset`,
    # until `django.db.models.sql.query.Query.join`, which has access to it with its `reuse` argument.
    # There, this should prevent the method from adding a duplicate join.
    queryset.query.used_aliases = cache[queryset.model._meta.db_table][field_name]

    return _filter_prefetch_queryset(queryset, field_name, instances)


def _hack_context(cache: _PrefetchCacheType) -> patch:
    # Build a patch that swaps Django's `_filter_prefetch_queryset` for our hacked
    # version, bound to the given through-table cache.
    return patch(
        f"{_filter_prefetch_queryset.__module__}.{_filter_prefetch_queryset.__name__}",
        side_effect=partial(_prefetch_hack, cache=cache),
    )


@contextlib.contextmanager
def fetch_context(info: GQLInfo) -> contextlib.AbstractContextManager:
    """Patches the prefetch mechanism if required."""
    # Only patch if something was registered for this operation; otherwise no-op.
    context = nullcontext()
    if info.operation in _PREFETCH_HACK_CACHE:
        # Deepcopy so the patch's cache is unaffected by later registrations.
        context = _hack_context(cache=deepcopy(_PREFETCH_HACK_CACHE[info.operation]))

    try:
        with context:
            yield
    finally:
        # NOTE(review): this clears the hack cache for *all* operations, not just
        # `info.operation` — presumably fine for sequential request handling, but
        # verify against concurrent execution of multiple operations.
        _PREFETCH_HACK_CACHE.clear()


def fetch_in_context(queryset: models.QuerySet[TModel], info: GQLInfo) -> list[TModel]:
    """Evaluates the queryset with the prefetch hack applied."""
    with fetch_context(info):
        return list(queryset)  # the database query is executed here
def get_field_selections(info: GQLInfo, model: Optional[type[models.Model]] = None) -> list[Any]:
    """
    Compile a nested list of the field selections included in the GraphQL query.

    Plain fields appear as snake_cased strings; selections on related objects appear
    as single-item dicts mapping the relation name to its nested selection list.

    :param info: The GraphQLResolveInfo containing the query AST.
    :param model: Optional model the root field corresponds to.
    """
    compiler = FieldSelectionCompiler(info, model)
    compiler.run()
    # The root selections are wrapped in a dict keyed by the root field's name.
    return compiler.field_selections[0][to_snake_case(info.field_name)]


@swappable_by_subclassing
class FieldSelectionCompiler(GraphQLASTWalker):
    """Class for compiling the selected fields from a GraphQL query."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Selections for the current level of the query; swapped out (and restored)
        # by `child_selections` while walking nested selections.
        self.field_selections: list[Any] = []
        super().__init__(*args, **kwargs)

    def handle_query_class(self, field_type: GrapheneObjectType, field_node: FieldNode) -> None:
        with self.child_selections(field_node):
            return super().handle_query_class(field_type, field_node)

    def handle_graphql_builtin(self, field_type: GrapheneObjectType, field_node: FieldNode) -> None:
        self.field_selections.append(to_snake_case(field_node.name.value))  # pragma: no cover

    def handle_plain_object_type(self, field_type: GrapheneObjectType, field_node: FieldNode) -> None:
        selections = get_selections(field_node)

        # An object type selected without sub-fields is recorded like a plain field.
        if not selections:
            self.field_selections.append(to_snake_case(field_node.name.value))
            return None

        graphene_type = self.get_graphene_type(field_type, field_node)
        with self.child_selections(field_node):
            return self.handle_selections(graphene_type, selections)

    def handle_normal_field(self, field_type: GrapheneObjectType, field_node: FieldNode, field: models.Field) -> None:
        self.field_selections.append(to_snake_case(field_node.name.value))

    def handle_custom_field(self, field_type: GrapheneObjectType, field_node: FieldNode) -> None:
        self.field_selections.append(to_snake_case(field_node.name.value))  # pragma: no cover

    def handle_to_one_field(
        self,
        field_type: GrapheneObjectType,
        field_node: FieldNode,
        related_field: ToOneField,
        related_model: type[models.Model] | None,
    ) -> None:
        with self.child_selections(field_node):
            # Fix: previously delegated to `super().handle_to_many_field(...)` here (and
            # `handle_to_many_field` delegated to `super().handle_to_one_field(...)`),
            # crossing the to-one/to-many handling in the base walker. Delegate to the
            # matching base handler, as `FilterInfoCompiler` and `OptimizationCompiler` do.
            return super().handle_to_one_field(field_type, field_node, related_field, related_model)

    def handle_to_many_field(
        self,
        field_type: GrapheneObjectType,
        field_node: FieldNode,
        related_field: ToManyField,
        related_model: type[models.Model] | None,
    ) -> None:
        with self.child_selections(field_node):
            return super().handle_to_many_field(field_type, field_node, related_field, related_model)

    @contextlib.contextmanager
    def child_selections(self, field_node: FieldNode) -> None:
        """Collect nested selections into a fresh list, then append it under the field's name."""
        field_name = to_snake_case(field_node.name.value)
        selections: list[Any] = []
        orig_selections = self.field_selections
        try:
            self.field_selections = selections
            yield
        finally:
            self.field_selections = orig_selections
            # Only record the child if it actually selected something.
            if selections:
                self.field_selections.append({field_name: selections})
class DefaultSettings(NamedTuple):
    # NamedTuple of all optimizer settings and their default values. User overrides
    # come from the `GRAPHQL_QUERY_OPTIMIZER` dict in Django settings (see SETTING_NAME).
    ALLOW_CONNECTION_AS_DEFAULT_NESTED_TO_MANY_FIELD: bool = False
    """
    Should DjangoConnectionField be allowed to be generated for nested to-many fields
    if the ObjectType has a connection? If False (default), always use DjangoListFields.
    Doesn't prevent defining a DjangoConnectionField on the ObjectType manually.
    """

    DEFAULT_FILTERSET_CLASS: str = ""
    """The default filterset class to use."""

    DISABLE_ONLY_FIELDS_OPTIMIZATION: bool = False
    """Disable optimizing fetched fields with `queryset.only()`."""

    MAX_COMPLEXITY: int = 10
    """Default max number of 'select_related' and 'prefetch related' joins optimizer is allowed to optimize."""

    OPTIMIZER_MARK: str = "_optimized"
    """Key used mark if a queryset has been optimized by the query optimizer."""

    PREFETCH_COUNT_KEY: str = "_optimizer_count"
    """Name used for annotating the prefetched queryset total count."""

    PREFETCH_PARTITION_INDEX: str = "_optimizer_partition_index"
    """Name used for aliasing the prefetched queryset partition index."""

    PREFETCH_SLICE_START: str = "_optimizer_slice_start"
    """Name used for aliasing the prefetched queryset slice start."""

    PREFETCH_SLICE_STOP: str = "_optimizer_slice_stop"
    """Name used for aliasing the prefetched queryset slice end."""

    SKIP_OPTIMIZATION_ON_ERROR: bool = False
    """If there is an unexpected error, should the optimizer skip optimization (True) or throw an error (False)?"""

    TOTAL_COUNT_FIELD: str = "totalCount"
    """The field name to use for fetching total count in connection fields."""


# Seed the settings holder with the defaults declared above.
DEFAULTS: dict[str, Any] = DefaultSettings()._asdict()
# Settings whose string values should be imported from dotted paths (none currently).
IMPORT_STRINGS: set[Union[bytes, str]] = set()
# Setting names from older versions that are no longer supported
# (presumably flagged by `settings_holder` if still used — see its docs).
REMOVED_SETTINGS: set[str] = {
    "PK_CACHE_KEY",
    "DONT_OPTIMIZE_ON_ERROR",
    "QUERY_CACHE_KEY",
}

optimizer_settings = SettingsHolder(
    setting_name=SETTING_NAME,
    defaults=DEFAULTS,
    import_strings=IMPORT_STRINGS,
    removed_settings=REMOVED_SETTINGS,
)

# Reload the holder whenever settings change (e.g. `override_settings` in tests).
reload_my_settings = reload_settings(SETTING_NAME, optimizer_settings)
setting_changed.connect(reload_my_settings)
class DjangoObjectType(graphene_django.types.DjangoObjectType):
    """DjangoObjectType that automatically optimizes its queryset."""

    _meta: OptimizedDjangoOptions

    class Meta:
        abstract = True

    @classmethod
    def __init_subclass_with_meta__(
        cls,
        _meta: Optional[OptimizedDjangoOptions] = None,
        model: Optional[type[Model]] = None,
        fields: Union[list[str], Literal["__all__"], None] = None,
        max_complexity: Optional[int] = None,
        **options: Any,
    ) -> None:
        """
        Configure the ObjectType subclass from its Meta.

        :param _meta: Pre-built options object; created here if not given.
        :param model: The Django model for this ObjectType (required).
        :param fields: Model fields to expose ('__all__', a list, or None).
        :param max_complexity: Per-type override for the optimizer's max join complexity.
        :raises TypeError: If `model` is not a valid Django model.
        """
        if not is_valid_django_model(model):  # pragma: no cover
            msg = f"You need to pass a valid Django Model in {cls.__name__}.Meta, received {model}."
            raise TypeError(msg)

        if _meta is None:
            _meta = OptimizedDjangoOptions(cls)

        # Expose a 'pk' field (resolved like 'id') unless the subclass excludes it.
        if not hasattr(cls, "pk") and (fields == ALL_FIELDS or fields is None or "pk" in fields):
            cls.pk = graphene.Int() if model._meta.pk.name == "id" else graphene.ID()
            cls.resolve_pk = cls.resolve_id

        filterset_class = options.get("filterset_class")
        filter_fields: Optional[dict[str, list[str]]] = options.pop("filter_fields", None)

        # Build a filterset from 'filter_fields' when no explicit class was given ...
        if filterset_class is None and filter_fields is not None:
            from .filter import create_filterset

            options["filterset_class"] = create_filterset(model, filter_fields)

        # ... otherwise make the given class's CSV filters graphene-compatible.
        elif filterset_class is not None:
            from graphene_django.filter.utils import replace_csv_filters

            replace_csv_filters(filterset_class)

        _meta.max_complexity = max_complexity or optimizer_settings.MAX_COMPLEXITY
        super().__init_subclass_with_meta__(_meta=_meta, model=model, fields=fields, **options)

    @classmethod
    def pre_optimization_hook(cls, queryset: QuerySet[TModel], optimizer: QueryOptimizer) -> QuerySet[TModel]:
        """A hook for modifying the optimizer results before optimization happens."""
        return queryset

    @classmethod
    def filter_queryset(cls, queryset: QuerySet[TModel], info: GQLInfo) -> QuerySet[TModel]:
        """Implement this method filter to the available rows from the model on this node."""
        return queryset

    @classmethod
    def get_queryset(cls, queryset: QuerySet[TModel], info: GQLInfo) -> QuerySet[TModel]:
        # Intentionally a no-op: optimization is applied elsewhere; override in subclasses if needed.
        return queryset

    @classmethod
    def get_node(cls, info: GQLInfo, pk: PK) -> Optional[TModel]:
        """Fetch a single optimized instance by primary key (relay node lookup)."""
        queryset = cls._meta.model._default_manager.all()
        maybe_instance = optimize_single(queryset, info, pk=pk, max_complexity=cls._meta.max_complexity)
        if maybe_instance is not None:  # pragma: no cover
            cls.run_instance_checks(maybe_instance, info)
        return maybe_instance

    @classmethod
    def run_instance_checks(cls, instance: TModel, info: GQLInfo) -> None:
        """A hook for running checks after getting a single instance."""
"Generator", 66 | "Generic", 67 | "GraphQLFilterInfo", 68 | "Hashable", 69 | "Iterable", 70 | "Literal", 71 | "ManualOptimizerMethod", 72 | "ModelField", 73 | "ModelResolver", 74 | "NamedTuple", 75 | "ObjectTypeInput", 76 | "OptimizedDjangoOptions", 77 | "Optional", 78 | "ParamSpec", 79 | "QuerySetResolver", 80 | "ToManyField", 81 | "ToOneField", 82 | "TypeGuard", 83 | "TypeVar", 84 | "TypedDict", 85 | "Union", 86 | "UnmountedTypeInput", 87 | "cast", 88 | "overload", 89 | ] 90 | 91 | 92 | TModel = TypeVar("TModel", bound=Model) 93 | PK: TypeAlias = Any 94 | ModelField: TypeAlias = Union[Field, ForeignObjectRel, "GenericForeignKey"] 95 | ToManyField: TypeAlias = Union["GenericRelation", ManyToManyField, ManyToOneRel, ManyToManyRel] 96 | ToOneField: TypeAlias = Union["GenericRelation", ForeignObject, ForeignKey, OneToOneField] 97 | AnyUser: TypeAlias = Union["User", "AnonymousUser"] 98 | QuerySetResolver: TypeAlias = Callable[..., Union[QuerySet, Manager, None]] 99 | ModelResolver: TypeAlias = Callable[..., Union[Model, None]] 100 | ConnectionResolver: TypeAlias = Callable[..., ConnectionType] 101 | ObjectTypeInput: TypeAlias = Union[ 102 | str, 103 | type[DjangoObjectType], 104 | Callable[[], type[DjangoObjectType]], 105 | ] 106 | UnmountedTypeInput: TypeAlias = Union[ 107 | str, 108 | type[UnmountedType], 109 | type[DjangoObjectType], 110 | Structure, 111 | Callable[[], type[UnmountedType]], 112 | Callable[[], type[DjangoObjectType]], 113 | ] 114 | ArgTypeInput: TypeAlias = Union[Argument, UnmountedType, Dynamic] 115 | Expr: TypeAlias = Union[models.Expression, models.F, models.Q] 116 | 117 | 118 | GRAPHQL_BUILTIN = ( 119 | "__typename", 120 | "__schema", 121 | "__type", 122 | "__typekind", 123 | "__field", 124 | "__inputvalue", 125 | "__enumvalue", 126 | "__directive", 127 | ) 128 | 129 | 130 | class UserHintedWSGIRequest(WSGIRequest): 131 | user: AnyUser 132 | 133 | optimizer_pagination: dict[str, PaginationArgs] 134 | """ 135 | This attribute is only present if 
it was set in 'DjangoConnectionField'. 136 | The key is the field's name in 'snake_case'. 137 | 138 | The information is also not final, since the size of the slice depends on 139 | the number of items in the queryset after filtering. 140 | It is only meant to be used for optimization, if the queryset is large, 141 | and needs to be evaluated before the optimizer does so. 142 | """ 143 | 144 | 145 | class GQLInfo(GraphQLResolveInfo): 146 | context: UserHintedWSGIRequest 147 | 148 | 149 | class OptimizedDjangoOptions(DjangoObjectTypeOptions): 150 | max_complexity: int 151 | 152 | 153 | class GraphQLFilterInfo(TypedDict, total=False): 154 | name: str 155 | filters: dict[str, Any] 156 | children: dict[str, GraphQLFilterInfo] 157 | filterset_class: Optional[type[FilterSet]] 158 | is_connection: bool 159 | is_node: bool 160 | max_limit: Optional[int] 161 | 162 | 163 | class ExpressionKind(Protocol): 164 | def resolve_expression( 165 | self, 166 | query: Query, 167 | allow_joins: bool, # noqa: FBT001 168 | reuse: set[str] | None, 169 | summarize: bool, # noqa: FBT001 170 | for_save: bool, # noqa: FBT001 171 | ) -> ExpressionKind: ... 172 | 173 | 174 | class ManualOptimizerMethod(Protocol): 175 | def __call__(self, queryset: QuerySet, optimizer: QueryOptimizer, **kwargs: Any) -> QuerySet: ... 
optimizer_logger = logging.getLogger("query_optimizer")


def mark_optimized(queryset: models.QuerySet) -> None:
    """Mark queryset as optimized so that later optimizers know to skip optimization."""
    queryset._hints[optimizer_settings.OPTIMIZER_MARK] = True


def remove_optimized_mark(queryset: models.QuerySet) -> None:  # pragma: no cover
    """Mark queryset as unoptimized so that later optimizers will run optimization."""
    queryset._hints.pop(optimizer_settings.OPTIMIZER_MARK, None)


def is_optimized(queryset: Union[models.QuerySet, list[models.Model]]) -> bool:
    """Has the queryset been optimized?"""
    # If Prefetch(..., to_attr=...) is used, the relation is a list of models,
    # which can only exist after the prefetch (and thus optimization) has run.
    if isinstance(queryset, list):
        return True
    return queryset._hints.get(optimizer_settings.OPTIMIZER_MARK, False)


def calculate_queryset_slice(
    *,
    after: Optional[int],
    before: Optional[int],
    first: Optional[int],
    last: Optional[int],
    size: int,
) -> slice:
    """
    Calculate queryset slicing based on the provided arguments.
    Before this, the arguments should be validated so that:
     - `first` and `last` are positive integers or `None`
     - `after` and `before` are non-negative integers or `None`
     - If both `after` and `before` are given, `after` is less than or equal to `before`

    This function is based on the Relay pagination algorithm.
    See. https://relay.dev/graphql/connections.htm#sec-Pagination-algorithm

    :param after: The index after which to start (exclusive).
    :param before: The index before which to stop (exclusive).
    :param first: The number of items to return from the start.
    :param last: The number of items to return from the end (after evaluating first).
    :param size: The total number of items in the queryset.
    """
    #
    # Start by fetching the maximum number of items.
    #
    start: int = 0
    stop: int = size
    #
    # If `after` is given, change the start index to `after`.
    # If `after` is greater than the current queryset size, change it to `size`.
    #
    if after is not None:
        start = min(after, stop)
    #
    # If `before` is given, change the stop index to `before`.
    # If `before` is greater than the current queryset size, change it to `size`.
    #
    if before is not None:
        stop = min(before, stop)
    #
    # If first is given, and it's smaller than the current queryset size,
    # change the stop index to `start + first`
    # -> Length becomes that of `first`, and the items after it have been removed.
    #
    if first is not None and first < (stop - start):
        stop = start + first
    #
    # If last is given, and it's smaller than the current queryset size,
    # change the start index to `stop - last`.
    # -> Length becomes that of `last`, and the items before it have been removed.
    #
    if last is not None and last < (stop - start):
        start = stop - last

    return slice(start, stop)


def calculate_slice_for_queryset(
    queryset: models.QuerySet,
    *,
    after: Optional[int],
    before: Optional[int],
    first: Optional[int],
    last: Optional[int],
    size: int,
) -> models.QuerySet:
    """
    Annotate queryset with pagination slice start and stop indexes.
    This is the Django ORM equivalent of the `calculate_queryset_slice` function:
    each step below mirrors one clamping step of that algorithm, expressed with
    `Case`/`When` so the database computes the indexes per row.
    """
    size_key = optimizer_settings.PREFETCH_COUNT_KEY
    # If the queryset has not been annotated with the total count, add an alias with the provided size.
    # (Since this is used in prefetch QuerySets, the provided size is likely wrong though.)
    if size_key not in queryset.query.annotations:  # pragma: no cover
        queryset = queryset.alias(**{size_key: models.Value(size)})

    start = models.Value(0)
    # Use `size_key` consistently (previously re-read the setting directly).
    stop = models.F(size_key)

    # start = min(after, size)
    if after is not None:
        start = models.Case(
            models.When(
                models.Q(**{f"{size_key}__lt": after}),
                then=stop,
            ),
            default=models.Value(after),
            output_field=models.IntegerField(),
        )

    # stop = min(before, size)
    if before is not None:
        stop = models.Case(
            models.When(
                models.Q(**{f"{size_key}__lt": before}),
                then=stop,
            ),
            default=models.Value(before),
            output_field=models.IntegerField(),
        )

    # if first < (stop - start): stop = start + first
    if first is not None:
        queryset = queryset.alias(**{f"{size_key}_size_1": stop - start})
        stop = models.Case(
            models.When(
                models.Q(**{f"{size_key}_size_1__lt": first}),
                then=stop,
            ),
            default=start + models.Value(first),
            output_field=models.IntegerField(),
        )

    # if last < (stop - start): start = stop - last
    if last is not None:
        queryset = queryset.alias(**{f"{size_key}_size_2": stop - start})
        start = models.Case(
            models.When(
                models.Q(**{f"{size_key}_size_2__lt": last}),
                then=start,
            ),
            default=stop - models.Value(last),
            output_field=models.IntegerField(),
        )

    return add_slice_to_queryset(queryset, start=start, stop=stop)
models.Q(**{f"{size_key}_size_2__lt": last}), 168 | then=start, 169 | ), 170 | default=stop - models.Value(last), 171 | output_field=models.IntegerField(), 172 | ) 173 | 174 | return add_slice_to_queryset(queryset, start=start, stop=stop) 175 | 176 | 177 | def add_slice_to_queryset( 178 | queryset: models.QuerySet, 179 | *, 180 | start: models.Expression, 181 | stop: models.Expression, 182 | ) -> models.QuerySet: 183 | return queryset.alias( 184 | **{ 185 | optimizer_settings.PREFETCH_SLICE_START: start, 186 | optimizer_settings.PREFETCH_SLICE_STOP: stop, 187 | }, 188 | ) 189 | 190 | 191 | class SubqueryCount(models.Subquery): 192 | template = "(SELECT COUNT(*) FROM (%(subquery)s) _count)" 193 | output_field = models.BigIntegerField() 194 | 195 | 196 | def swappable_by_subclassing(obj: Ttype) -> Ttype: 197 | """Makes the decorated class return the most recently created direct subclass when it is instantiated.""" 198 | orig_init_subclass = obj.__init_subclass__ 199 | 200 | def init_subclass(*args: Any, **kwargs: Any) -> None: 201 | nonlocal obj 202 | 203 | new_subcls: type = obj.__subclasses__()[-1] 204 | 205 | def new(_: type, *_args: Any, **_kwargs: Any) -> Ttype: 206 | return super(type, new_subcls).__new__(new_subcls) # type: ignore[arg-type] 207 | 208 | obj.__new__ = new 209 | 210 | return orig_init_subclass(*args, **kwargs) 211 | 212 | obj.__init_subclass__ = init_subclass 213 | return obj 214 | -------------------------------------------------------------------------------- /query_optimizer/validators.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from graphene_django.settings import graphene_settings 4 | from graphql_relay import cursor_to_offset 5 | 6 | from .typing import Optional, TypedDict 7 | 8 | __all__ = [ 9 | "validate_pagination_args", 10 | ] 11 | 12 | 13 | class PaginationArgs(TypedDict): 14 | after: Optional[int] 15 | before: Optional[int] 16 | first: 
class PaginationArgs(TypedDict):
    """Validated, cursor-decoded relay pagination arguments (all zero-based offsets)."""

    after: Optional[int]
    before: Optional[int]
    first: Optional[int]
    last: Optional[int]
    size: Optional[int]


def validate_pagination_args(  # noqa: C901, PLR0912
    first: Optional[int],
    last: Optional[int],
    offset: Optional[int],
    after: Optional[str],
    before: Optional[str],
    max_limit: Optional[int] = None,
) -> PaginationArgs:
    """
    Validate the pagination arguments and return a dictionary with the validated values.

    :param first: Number of records to return from the beginning.
    :param last: Number of records to return from the end.
    :param offset: Number of records to skip from the beginning.
    :param after: Cursor value for the last record in the previous page.
    :param before: Cursor value for the first record in the next page.
    :param max_limit: Maximum limit for the number of records that can be requested.
    :raises ValueError: Validation error.
    """
    # Decode opaque relay cursors into zero-based integer offsets.
    after = cursor_to_offset(after) if after is not None else None
    before = cursor_to_offset(before) if before is not None else None

    if graphene_settings.RELAY_CONNECTION_ENFORCE_FIRST_OR_LAST and not (first or last):  # pragma: no cover
        msg = "You must provide a `first` or `last` for pagination."
        raise ValueError(msg)

    if first is not None:
        if not isinstance(first, int) or first <= 0:
            msg = "Argument 'first' must be a positive integer."
            raise ValueError(msg)

        if isinstance(max_limit, int) and first > max_limit:
            msg = f"Requesting first {first} records exceeds the limit of {max_limit}."
            raise ValueError(msg)

    if last is not None:
        if not isinstance(last, int) or last <= 0:
            msg = "Argument 'last' must be a positive integer."
            raise ValueError(msg)

        if isinstance(max_limit, int) and last > max_limit:
            msg = f"Requesting last {last} records exceeds the limit of {max_limit}."
            raise ValueError(msg)

    # Neither bound given: cap the page size at the configured maximum.
    if isinstance(max_limit, int) and first is None and last is None:
        first = max_limit

    if offset is not None:
        if after is not None or before is not None:
            msg = "Can only use either `offset` or `before`/`after` for pagination."
            raise ValueError(msg)
        if not isinstance(offset, int) or offset < 0:
            # Fixed message: zero is accepted by the check above, so the
            # requirement is "non-negative", not "positive".
            msg = "Argument `offset` must be a non-negative integer."
            raise ValueError(msg)

        # Convert offset to after cursor value. Note that after cursor dictates
        # a value _after_ which results should be returned, so we need to subtract
        # 1 from the offset to get the correct cursor value.
        if offset > 0:  # ignore zero offset
            after = offset - 1

    if after is not None and (not isinstance(after, int) or after < 0):
        msg = "The node pointed with `after` does not exist."
        raise ValueError(msg)

    if before is not None and (not isinstance(before, int) or before < 0):
        msg = "The node pointed with `before` does not exist."
        raise ValueError(msg)

    if after is not None and before is not None and after >= before:
        msg = "The node pointed with `after` must be before the node pointed with `before`."
        raise ValueError(msg)

    # Since `after` is also exclusive, we need to add 1 to it, so that slicing works correctly.
    if after is not None:
        after += 1

    # Size is changed later with `queryset.count()`.
    size = max_limit if isinstance(max_limit, int) else None
    return PaginationArgs(after=after, before=before, first=first, last=last, size=size)
# --- tests/__init__.py ----------------------------------------------------
import os

import django

# Point Django at the example project's settings before setup.
# Fixed: the settings module is `example_project.config.settings` (the repo has
# no `app_project` package), so the previous default made `django.setup()` fail
# with ModuleNotFoundError whenever the env var was not already set.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_project.config.settings")

django.setup()


# --- tests/conftest.py ----------------------------------------------------
import json
from collections.abc import Iterator
from typing import TYPE_CHECKING

import pytest
from graphene_django.utils.testing import graphql_query

from example_project.app.types import BuildingNode
from example_project.app.utils import QueryData, capture_database_queries
from query_optimizer.typing import Any, Callable, NamedTuple, Optional, Union

if TYPE_CHECKING:
    from django.test.client import Client


class GraphQLResponse(NamedTuple):
    """Parsed GraphQL response together with the database queries it executed."""

    full_content: dict[str, Any]  # the whole response JSON
    content: Union[dict[str, Any], list[dict[str, Any]], None]  # first value under "data"
    errors: Optional[list[dict[str, Any]]]  # the "errors" key, if present
    queries: QueryData  # captured database queries

    @property
    def no_errors(self) -> bool:
        """True if the response contained no GraphQL errors."""
        return self.errors is None


@pytest.fixture()
def graphql_client(client: "Client") -> Callable[..., GraphQLResponse]:
    """Return a callable that runs a GraphQL query and captures the DB queries it makes."""

    def func(*args: Any, **kwargs: Any) -> GraphQLResponse:
        with capture_database_queries() as queries:
            response = graphql_query(*args, **kwargs, client=client)

        full_content = json.loads(response.content)
        errors = full_content.get("errors")
        # These tests query a single root field, so unwrap the first data value.
        content = next(iter(full_content.get("data", {}).values()), None)

        return GraphQLResponse(
            full_content=full_content,
            content=content,
            errors=errors,
            queries=queries,
        )

    return func


@pytest.fixture()
def _set_building_node_apartments_max_limit() -> Iterator[None]:
    # Fixed annotation: this is a yield-fixture producing None, not `-> int`.
    limit = BuildingNode.apartments.max_limit
    try:
        BuildingNode.apartments.max_limit = 1
        yield
    finally:
        BuildingNode.apartments.max_limit = limit


@pytest.fixture()
def _remove_apartment_node_apartments_max_limit() -> Iterator[None]:
    # NOTE(review): the name says "apartment_node" but it patches BuildingNode —
    # kept as-is because test modules reference the fixture by this exact name.
    limit = BuildingNode.apartments.max_limit
    try:
        BuildingNode.apartments.max_limit = None
        yield
    finally:
        BuildingNode.apartments.max_limit = limit
class GenericDjangoModelFactory(DjangoModelFactory, Generic[TModel]):
    """DjangoModelFactory whose build/create class methods are typed to return TModel."""

    @classmethod
    def build(cls: type[Generic[TModel]], **kwargs: Any) -> TModel:
        return super().build(**kwargs)

    @classmethod
    def create(cls: type[Generic[TModel]], **kwargs: Any) -> TModel:
        return super().create(**kwargs)

    @classmethod
    def build_batch(cls: type[Generic[TModel]], size: int, **kwargs: Any) -> list[TModel]:
        return super().build_batch(size, **kwargs)

    @classmethod
    def create_batch(cls: type[Generic[TModel]], size: int, **kwargs: Any) -> list[TModel]:
        return super().create_batch(size, **kwargs)


class CustomFactoryWrapper:
    """
    Lazily resolves a factory given as a Factory class, a zero-argument callable,
    or a dotted-path string ('module.path.FactoryClass').
    """

    def __init__(self, factory_: FactoryType) -> None:
        # Exactly one of `factory` / `callable` is set; `get()` resolves the rest.
        self.factory: Optional[type[BaseFactory]] = None
        self.callable: Optional[Callable[..., type[BaseFactory]]] = None

        if isinstance(factory_, type) and issubclass(factory_, BaseFactory):
            self.factory = factory_
            return

        if callable(factory_):
            self.callable = factory_
            return

        if not (isinstance(factory_, str) and "." in factory_):
            msg = (
                "The factory must be one of: "
                "1) a string with the format 'module.path.FactoryClass' "
                "2) a Factory class "
                "3) a callable that returns a Factory class"
            )
            raise ValueError(msg)

        # Defer the import to first use — avoids circular imports between factories.
        self.callable = lambda: import_object(*factory_.rsplit(".", 1))

    def get(self) -> type[BaseFactory]:
        """Resolve (and cache) the wrapped factory class."""
        if self.factory is None:
            self.factory = self.callable()
        return self.factory


class PostFactory(PostGeneration, Generic[TModel]):
    """Base post-generation declaration that creates related objects via a lazy factory."""

    def __init__(self, factory: FactoryType) -> None:
        super().__init__(function=self.generate)
        # Attribute name on the owning factory, filled in by `__set_name__`.
        self.field_name: str = ""
        self.factory_wrapper = CustomFactoryWrapper(factory)

    def __set_name__(self, owner: Any, name: str) -> None:
        self.field_name = name

    def get_factory(self) -> BaseFactory:
        return self.factory_wrapper.get()

    def generate(self, instance: Model, create: bool, models: Optional[Iterable[TModel]], **kwargs: Any) -> None:
        """Hook invoked after instance generation; subclasses define how relations are attached."""
        raise NotImplementedError

    def manager(self, instance: Model) -> Any:
        """Return the related manager for this field on the generated instance."""
        return getattr(instance, self.field_name)


class ManyToManyFactory(PostFactory[TModel]):
    """Post-generation hook that adds related objects through a many-to-many manager."""

    def generate(self, instance: Model, create: bool, models: Optional[Iterable[TModel]], **kwargs: Any) -> None:
        # With kwargs but no explicit models, build/create one related object.
        if not models and kwargs:
            factory = self.get_factory()
            model = factory.create(**kwargs) if create else factory.build(**kwargs)
            self.manager(instance).add(model)

        # Explicitly provided objects are simply linked.
        for model in models or []:
            self.manager(instance).add(model)


class OneToManyFactory(PostFactory[TModel]):
    """Post-generation hook that creates reverse-FK children pointing back at the instance."""

    def generate(self, instance: Model, create: bool, models: Optional[Iterable[TModel]], **kwargs: Any) -> None:
        if not models and kwargs:
            factory = self.get_factory()
            # Point the child's FK at the instance unless explicitly overridden.
            field_name = self.manager(instance).field.name
            kwargs.setdefault(field_name, instance)
            factory.create(**kwargs) if create else factory.build(**kwargs)
class ReverseSubFactory(PostFactory[TModel]):
    """Post-generation hook that creates the object on the remote side of a reverse relation."""

    def generate(self, instance: Model, create: bool, models: Optional[Iterable[TModel]], **kwargs: Any) -> None:
        if not models and kwargs:
            factory = self.get_factory()
            # Resolve the remote field name from model meta so the created
            # object links back to this instance.
            field_name = instance._meta.get_field(self.field_name).remote_field.name
            kwargs.setdefault(field_name, instance)
            factory.create(**kwargs) if create else factory.build(**kwargs)


class NullableSubFactory(SubFactory, Generic[TModel]):
    """SubFactory that yields None when `null=True` and no overrides are passed."""

    def __init__(self, factory: FactoryType, null: bool = False, **kwargs) -> None:
        # Skip SubFactory.__init__ to replace its factory wrapper with ours
        self.null = null
        super(SubFactory, self).__init__(**kwargs)
        self.factory_wrapper = CustomFactoryWrapper(factory)

    def evaluate(self, instance: Resolver, step: BuildStep, extra: dict[str, Any]) -> Optional[TModel]:
        # No overrides and nullable -> leave the relation empty.
        if not extra and self.null:
            return None
        return super().evaluate(instance, step, extra)


class ApartmentFactory(GenericDjangoModelFactory[Apartment]):
    """Factory for Apartment models; builds an optional Building and child Sales."""

    class Meta:
        model = Apartment

    completion_date = fuzzy.FuzzyDate(start_date=datetime.date.fromisoformat("2020-01-01"))

    street_address = fuzzy.FuzzyText()
    stair = fuzzy.FuzzyText()
    floor = fuzzy.FuzzyInteger(low=0)
    apartment_number = fuzzy.FuzzyInteger(low=0)

    # Share numbers are intentionally left unset by default.
    shares_start = None
    shares_end = None

    surface_area = fuzzy.FuzzyInteger(low=0)
    rooms = fuzzy.FuzzyInteger(low=0)

    building = NullableSubFactory("tests.factories.building.BuildingFactory")
    sales = OneToManyFactory("tests.factories.sale.SaleFactory")
NullableSubFactory("tests.factories.building.BuildingFactory") 28 | sales = OneToManyFactory("tests.factories.sale.SaleFactory") 29 | -------------------------------------------------------------------------------- /tests/factories/building.py: -------------------------------------------------------------------------------- 1 | from factory import fuzzy 2 | 3 | from example_project.app.models import Building 4 | 5 | from ._base import GenericDjangoModelFactory, NullableSubFactory, OneToManyFactory 6 | 7 | 8 | class BuildingFactory(GenericDjangoModelFactory[Building]): 9 | class Meta: 10 | model = Building 11 | django_get_or_create = ["name"] 12 | 13 | name = fuzzy.FuzzyText() 14 | street_address = fuzzy.FuzzyText() 15 | real_estate = NullableSubFactory("tests.factories.real_estate.RealEstateFactory") 16 | apartments = OneToManyFactory("tests.factories.apartment.ApartmentFactory") 17 | -------------------------------------------------------------------------------- /tests/factories/developer.py: -------------------------------------------------------------------------------- 1 | from factory import fuzzy 2 | 3 | from example_project.app.models import Developer 4 | 5 | from ._base import GenericDjangoModelFactory, ManyToManyFactory 6 | 7 | 8 | class DeveloperFactory(GenericDjangoModelFactory[Developer]): 9 | class Meta: 10 | model = Developer 11 | django_get_or_create = ["name"] 12 | 13 | name = fuzzy.FuzzyText() 14 | description = fuzzy.FuzzyText() 15 | housingcompany_set = ManyToManyFactory("tests.factories.housing_company.HousingCompanyFactory") 16 | 17 | employees = ManyToManyFactory("tests.factories.employee.EmployeeFactory") 18 | -------------------------------------------------------------------------------- /tests/factories/employee.py: -------------------------------------------------------------------------------- 1 | from factory import fuzzy 2 | 3 | from example_project.app.models import Employee, EmployeeRole 4 | 5 | from ._base import 
GenericDjangoModelFactory, ManyToManyFactory


class EmployeeFactory(GenericDjangoModelFactory[Employee]):
    """Build Employee rows; reuses an existing row with the same name."""

    class Meta:
        model = Employee
        django_get_or_create = ["name"]

    name = fuzzy.FuzzyText()
    role = fuzzy.FuzzyChoice(EmployeeRole.values)

    developers = ManyToManyFactory("tests.factories.developer.DeveloperFactory")


# ---------------------------------------------------------------------------
# tests/factories/housing_company.py
# ---------------------------------------------------------------------------

from factory import fuzzy

from example_project.app.models import HousingCompany

from ._base import GenericDjangoModelFactory, ManyToManyFactory, NullableSubFactory, OneToManyFactory


class HousingCompanyFactory(GenericDjangoModelFactory[HousingCompany]):
    """Build HousingCompany rows; reuses an existing row with the same name."""

    class Meta:
        model = HousingCompany
        django_get_or_create = ["name"]

    name = fuzzy.FuzzyText()
    street_address = fuzzy.FuzzyText()
    postal_code = NullableSubFactory("tests.factories.postal_code.PostalCodeFactory")
    city = fuzzy.FuzzyText()
    developers = ManyToManyFactory("tests.factories.developer.DeveloperFactory")
    shareholders = ManyToManyFactory("tests.factories.shareholder.ShareholderFactory")
    property_manager = NullableSubFactory("tests.factories.property_manager.PropertyManagerFactory")
    real_estates = OneToManyFactory("tests.factories.real_estate.RealEstateFactory")


# ---------------------------------------------------------------------------
# tests/factories/owner.py
# ---------------------------------------------------------------------------

from factory import Faker, fuzzy

from example_project.app.models import Owner

from ._base import GenericDjangoModelFactory, OneToManyFactory


class OwnerFactory(GenericDjangoModelFactory[Owner]):
    """Build Owner rows; reuses an existing row with the same name."""

    class Meta:
        model = Owner
        django_get_or_create = ["name"]

    name = fuzzy.FuzzyText()
    identifier = fuzzy.FuzzyText()
    email = Faker("email")
    phone = Faker("phone_number")

    ownerships = OneToManyFactory("tests.factories.ownership.OwnershipFactory")


# ---------------------------------------------------------------------------
# tests/factories/ownership.py
# ---------------------------------------------------------------------------

from factory import fuzzy

from example_project.app.models import Ownership

from ._base import GenericDjangoModelFactory, NullableSubFactory


class OwnershipFactory(GenericDjangoModelFactory[Ownership]):
    """Build Ownership rows linking an Owner to a Sale."""

    class Meta:
        model = Ownership

    percentage = fuzzy.FuzzyInteger(0, 100)

    owner = NullableSubFactory("tests.factories.owner.OwnerFactory")
    sale = NullableSubFactory("tests.factories.sale.SaleFactory")


# ---------------------------------------------------------------------------
# tests/factories/postal_code.py
# ---------------------------------------------------------------------------

from factory import fuzzy

from example_project.app.models import PostalCode

from ._base import GenericDjangoModelFactory, OneToManyFactory


class PostalCodeFactory(GenericDjangoModelFactory[PostalCode]):
    """Build PostalCode rows; reuses an existing row with the same code."""

    class Meta:
        model = PostalCode
        django_get_or_create = ["code"]

    code = fuzzy.FuzzyText()
    housing_companies = OneToManyFactory("tests.factories.housing_company.HousingCompanyFactory")


# ---------------------------------------------------------------------------
# tests/factories/property_manager.py
# ---------------------------------------------------------------------------

import factory
from factory import fuzzy

from example_project.app.models import PropertyManager

from ._base import GenericDjangoModelFactory, OneToManyFactory


class PropertyManagerFactory(GenericDjangoModelFactory[PropertyManager]):
    """Build PropertyManager rows; reuses an existing row with the same name."""

    class Meta:
        model = PropertyManager
        django_get_or_create = ["name"]

    name = fuzzy.FuzzyText()
    email = factory.Faker("email")
    housing_companies = OneToManyFactory("tests.factories.housing_company.HousingCompanyFactory")


# ---------------------------------------------------------------------------
# tests/factories/real_estate.py
# ---------------------------------------------------------------------------

from factory import fuzzy

from example_project.app.models import RealEstate

from ._base import GenericDjangoModelFactory, NullableSubFactory, OneToManyFactory


class RealEstateFactory(GenericDjangoModelFactory[RealEstate]):
    """Build RealEstate rows; reuses an existing row with the same name."""

    class Meta:
        model = RealEstate
        django_get_or_create = ["name"]

    name = fuzzy.FuzzyText()
    surface_area = fuzzy.FuzzyInteger(1, 1000)
    housing_company = NullableSubFactory("tests.factories.housing_company.HousingCompanyFactory")
    # The Building FK to RealEstate has no related_name, hence the default accessor.
    building_set = OneToManyFactory("tests.factories.building.BuildingFactory")


# ---------------------------------------------------------------------------
# tests/factories/sale.py
# ---------------------------------------------------------------------------

import datetime

from factory import fuzzy

from example_project.app.models import Sale

from ._base import GenericDjangoModelFactory, NullableSubFactory, OneToManyFactory


class SaleFactory(GenericDjangoModelFactory[Sale]):
    """Build Sale rows for an Apartment, with dates from 2020-01-01 onward."""

    class Meta:
        model = Sale

    purchase_date = fuzzy.FuzzyDate(start_date=datetime.date.fromisoformat("2020-01-01"))
    purchase_price = fuzzy.FuzzyInteger(1, 1000)
    apartment = NullableSubFactory("tests.factories.apartment.ApartmentFactory")
    ownerships = OneToManyFactory("tests.factories.ownership.OwnershipFactory")
# ---------------------------------------------------------------------------
# tests/factories/shareholder.py
# ---------------------------------------------------------------------------

from factory import fuzzy

from example_project.app.models import Shareholder

from ._base import GenericDjangoModelFactory, ManyToManyFactory


class ShareholderFactory(GenericDjangoModelFactory[Shareholder]):
    """Build Shareholder rows; reuses an existing row with the same name."""

    class Meta:
        model = Shareholder
        django_get_or_create = ["name"]

    name = fuzzy.FuzzyText()
    share = fuzzy.FuzzyDecimal(0, 100)
    housing_companies = ManyToManyFactory("tests.factories.housing_company.HousingCompanyFactory")


# ---------------------------------------------------------------------------
# tests/factories/tag.py
# ---------------------------------------------------------------------------

from typing import Any

from django.db.models import Model
from factory import fuzzy

from example_project.app.models import Tag

from ._base import GenericDjangoModelFactory


class TagFactory(GenericDjangoModelFactory[Tag]):
    """Build Tag rows. The generic-relation target is a required argument."""

    class Meta:
        model = Tag

    tag = fuzzy.FuzzyText()

    @classmethod
    def build(cls, content_object: Model, **kwargs: Any) -> Tag:
        # Force callers to supply the generic FK target explicitly.
        return super().build(content_object=content_object, **kwargs)

    @classmethod
    def create(cls, content_object: Model, **kwargs: Any) -> Tag:
        return super().create(content_object=content_object, **kwargs)


# ---------------------------------------------------------------------------
# tests/helpers.py
# ---------------------------------------------------------------------------

import re

from query_optimizer.typing import NamedTuple, TypedDict, TypeVar, Union

__all__ = [
    "parametrize_helper",
]


TNamedTuple = TypeVar("TNamedTuple", bound=NamedTuple)


class ParametrizeArgs(TypedDict):
    # Keyword arguments accepted by `pytest.mark.parametrize`.
    argnames: list[str]
    argvalues: list[TNamedTuple]
    ids: list[str]


def parametrize_helper(__tests: dict[str, TNamedTuple], /) -> ParametrizeArgs:
    """
    Construct parametrize input while setting test IDs.

    :param __tests: Mapping of test ID to a NamedTuple of test arguments.
                    All values must be instances of the same NamedTuple class.
    :raises RuntimeError: If the values are not NamedTuples (no `__annotations__`).
    """
    assert __tests, "I need some tests, please!"  # noqa: S101
    values = list(__tests.values())
    try:
        return ParametrizeArgs(
            # Field names of the NamedTuple become the parametrized argument names.
            argnames=list(values[0].__class__.__annotations__),
            argvalues=values,
            ids=list(__tests),
        )
    except AttributeError as error:
        msg = "Improper configuration. Did you use a NamedTuple for TNamedTuple?"
        raise RuntimeError(msg) from error


class like:
    def __init__(self, query: str) -> None:
        """Compares a string to a regular expression pattern."""
        self.pattern: re.Pattern[str] = re.compile(query)

    def __eq__(self, other: object) -> bool:
        # Non-strings can never match; `other` may be any object at an `==` site.
        if not isinstance(other, str):
            return False
        return self.pattern.match(other) is not None


class has:
    def __init__(self, *patterns: Union[str, bytes, like]) -> None:
        """
        Does the compared string contain the specified regular expression patterns?

        :param patterns: Use `str` or `like` objects for "contains" checks, and `bytes` for "excludes" checks.
        """
        self.patterns = patterns

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, str):
            return False
        return all(
            # `bytes` patterns are negated: the decoded text must NOT appear.
            pattern.decode() not in other if isinstance(pattern, bytes) else pattern in other
            for pattern in self.patterns
        )


# ---------------------------------------------------------------------------
# tests/test_basic_relations.py
# ---------------------------------------------------------------------------

import pytest

from tests.factories import ApartmentFactory, DeveloperFactory, HousingCompanyFactory, RealEstateFactory
from tests.helpers import has

pytestmark = [
    pytest.mark.django_db,
]


def test_relations__to_one_relations(graphql_client):
    ApartmentFactory.create(building__real_estate__housing_company__postal_code__code="00001")
    ApartmentFactory.create(building__real_estate__housing_company__postal_code__code="00002")
    ApartmentFactory.create(building__real_estate__housing_company__postal_code__code="00003")

    query = """
        query {
          allApartments {
            building {
              realEstate {
                housingCompany {
                  postalCode {
                    code
                  }
                }
              }
            }
          }
        }
    """

    response = graphql_client(query)
    assert response.no_errors, response.errors

    # 1 query for fetching Apartments and related Buildings, RealEstates, HousingCompanies, and PostalCodes
    assert response.queries.count == 1, response.queries.log

    assert response.queries[0] == has(
        'FROM "app_apartment"',
        'INNER JOIN "app_building"',
        'INNER JOIN "app_realestate"',
        'INNER JOIN "app_housingcompany"',
        'INNER JOIN "app_postalcode"',
    )

    assert response.content == [
        {"building": {"realEstate": {"housingCompany": {"postalCode": {"code": "00001"}}}}},
        {"building": {"realEstate": {"housingCompany": {"postalCode": {"code": "00002"}}}}},
        {"building": {"realEstate": {"housingCompany": {"postalCode": {"code": "00003"}}}}},
    ]
{"housingCompany": {"postalCode": {"code": "00003"}}}}}, 50 | ] 51 | 52 | 53 | def test_relations__one_to_many_relations(graphql_client): 54 | ApartmentFactory.create(sales__ownerships__owner__name="1") 55 | ApartmentFactory.create(sales__ownerships__owner__name="2") 56 | ApartmentFactory.create(sales__ownerships__owner__name="3") 57 | 58 | query = """ 59 | query { 60 | allApartments { 61 | sales { 62 | ownerships { 63 | owner { 64 | name 65 | } 66 | } 67 | } 68 | } 69 | } 70 | """ 71 | 72 | response = graphql_client(query) 73 | assert response.no_errors, response.errors 74 | 75 | # 1 query for fetching Apartments 76 | # 1 query for fetching Sales 77 | # 1 query for fetching Ownerships and related Owners 78 | assert response.queries.count == 3, response.queries.log 79 | 80 | assert response.queries[0] == has('FROM "app_apartment"') 81 | assert response.queries[1] == has('FROM "app_sale"') 82 | assert response.queries[2] == has('FROM "app_ownership"', 'INNER JOIN "app_owner"') 83 | 84 | # Check that limiting is not applied to the nested fields, since they are list fields 85 | assert response.queries[1] != has("ROW_NUMBER() OVER") 86 | assert response.queries[2] != has("ROW_NUMBER() OVER") 87 | 88 | assert response.content == [ 89 | {"sales": [{"ownerships": [{"owner": {"name": "1"}}]}]}, 90 | {"sales": [{"ownerships": [{"owner": {"name": "2"}}]}]}, 91 | {"sales": [{"ownerships": [{"owner": {"name": "3"}}]}]}, 92 | ] 93 | 94 | 95 | def test_relations__one_to_many_relations__no_related_name(graphql_client): 96 | RealEstateFactory.create(building_set__name="1") 97 | RealEstateFactory.create(building_set__name="2") 98 | RealEstateFactory.create(building_set__name="3") 99 | 100 | query = """ 101 | query { 102 | allRealEstates { 103 | buildingSet { 104 | name 105 | } 106 | } 107 | } 108 | """ 109 | 110 | response = graphql_client(query) 111 | assert response.no_errors, response.errors 112 | 113 | # 1 query for fetching RealEstates 114 | # 1 query for fetching Buildings 
115 | assert response.queries.count == 2, response.queries.log 116 | 117 | assert response.queries[0] == has('FROM "app_realestate"') 118 | assert response.queries[1] == has('FROM "app_building"') 119 | 120 | # Check that limiting is not applied to the nested fields, since they are list fields 121 | assert response.queries[1] != has("ROW_NUMBER() OVER") 122 | 123 | assert response.content == [ 124 | {"buildingSet": [{"name": "1"}]}, 125 | {"buildingSet": [{"name": "2"}]}, 126 | {"buildingSet": [{"name": "3"}]}, 127 | ] 128 | 129 | 130 | def test_relations__many_to_many_relations(graphql_client): 131 | HousingCompanyFactory.create(developers__name="1") 132 | HousingCompanyFactory.create(developers__name="2") 133 | HousingCompanyFactory.create(developers__name="3") 134 | 135 | query = """ 136 | query { 137 | allHousingCompanies { 138 | developers { 139 | name 140 | } 141 | } 142 | } 143 | """ 144 | 145 | response = graphql_client(query) 146 | assert response.no_errors, response.errors 147 | 148 | # 1 query for fetching HousingCompanies 149 | # 1 query for fetching Developers 150 | assert response.queries.count == 2, response.queries.log 151 | 152 | assert response.queries[0] == has('FROM "app_housingcompany"') 153 | assert response.queries[1] == has('FROM "app_developer"') 154 | 155 | # Check that limiting is not applied to the nested fields, since they are list fields 156 | assert response.queries[1] != has("ROW_NUMBER() OVER") 157 | 158 | assert response.content == [ 159 | {"developers": [{"name": "1"}]}, 160 | {"developers": [{"name": "2"}]}, 161 | {"developers": [{"name": "3"}]}, 162 | ] 163 | 164 | 165 | def test_relations__many_to_many_relations__no_related_name(graphql_client): 166 | DeveloperFactory.create(housingcompany_set__name="1") 167 | DeveloperFactory.create(housingcompany_set__name="2") 168 | DeveloperFactory.create(housingcompany_set__name="3") 169 | 170 | query = """ 171 | query { 172 | allDevelopers { 173 | housingcompanySet { 174 | name 175 | } 
176 | } 177 | } 178 | """ 179 | 180 | response = graphql_client(query) 181 | assert response.no_errors, response.errors 182 | 183 | # 1 query for fetching Developers 184 | # 1 query for fetching HousingCompanies 185 | assert response.queries.count == 2, response.queries.log 186 | 187 | assert response.queries[0] == has('FROM "app_developer"') 188 | assert response.queries[1] == has('FROM "app_housingcompany"') 189 | 190 | assert response.content == [ 191 | {"housingcompanySet": [{"name": "1"}]}, 192 | {"housingcompanySet": [{"name": "2"}]}, 193 | {"housingcompanySet": [{"name": "3"}]}, 194 | ] 195 | 196 | 197 | def test_relations__many_to_many_relations__shared_entities(graphql_client): 198 | developer_1 = DeveloperFactory.create(name="1") 199 | developer_2 = DeveloperFactory.create(name="2") 200 | developer_3 = DeveloperFactory.create(name="3") 201 | developer_4 = DeveloperFactory.create(name="4") 202 | developer_5 = DeveloperFactory.create(name="5") 203 | developer_6 = DeveloperFactory.create(name="6") 204 | 205 | HousingCompanyFactory.create(developers=[developer_1, developer_2, developer_3]) 206 | HousingCompanyFactory.create(developers=[developer_3, developer_5, developer_6]) 207 | HousingCompanyFactory.create(developers=[developer_1, developer_3, developer_4, developer_6]) 208 | 209 | query = """ 210 | query { 211 | allHousingCompanies { 212 | developers { 213 | name 214 | } 215 | } 216 | } 217 | """ 218 | 219 | response = graphql_client(query) 220 | assert response.no_errors, response.errors 221 | 222 | # 1 query for fetching HousingCompanies 223 | # 1 query for fetching Developers 224 | assert response.queries.count == 2, response.queries.log 225 | 226 | assert response.queries[0] == has('FROM "app_housingcompany"') 227 | assert response.queries[1] == has('FROM "app_developer"') 228 | 229 | # Check that limiting is not applied to the nested fields, since they are list fields 230 | assert response.queries[1] != has("ROW_NUMBER() OVER") 231 | 232 | assert 
response.content == [ 233 | { 234 | "developers": [ 235 | {"name": "1"}, 236 | {"name": "2"}, 237 | {"name": "3"}, 238 | ] 239 | }, 240 | { 241 | "developers": [ 242 | {"name": "3"}, 243 | {"name": "5"}, 244 | {"name": "6"}, 245 | ] 246 | }, 247 | { 248 | "developers": [ 249 | {"name": "1"}, 250 | {"name": "3"}, 251 | {"name": "4"}, 252 | {"name": "6"}, 253 | ] 254 | }, 255 | ] 256 | -------------------------------------------------------------------------------- /tests/test_fragments.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from tests.factories import ApartmentFactory, DeveloperFactory, OwnerFactory, PropertyManagerFactory 4 | from tests.helpers import has 5 | 6 | pytestmark = [ 7 | pytest.mark.django_db, 8 | ] 9 | 10 | 11 | def test_fragment_spread(graphql_client): 12 | ApartmentFactory.create(shares_start=1, shares_end=2) 13 | ApartmentFactory.create(shares_start=3, shares_end=4) 14 | ApartmentFactory.create(shares_start=5, shares_end=6) 15 | 16 | query = """ 17 | query { 18 | allApartments { 19 | ...Shares 20 | } 21 | } 22 | 23 | fragment Shares on ApartmentType { 24 | sharesStart 25 | sharesEnd 26 | } 27 | """ 28 | 29 | response = graphql_client(query) 30 | assert response.no_errors, response.errors 31 | 32 | # 1 query for fetching apartments. 
33 | assert response.queries.count == 1, response.queries.log 34 | 35 | assert response.queries[0] == has( 36 | 'FROM "app_apartment"', 37 | '"app_apartment"."shares_start"', 38 | '"app_apartment"."shares_end"', 39 | ) 40 | 41 | assert response.content == [ 42 | {"sharesStart": 1, "sharesEnd": 2}, 43 | {"sharesStart": 3, "sharesEnd": 4}, 44 | {"sharesStart": 5, "sharesEnd": 6}, 45 | ] 46 | 47 | 48 | def test_fragment_spread__relations(graphql_client): 49 | ApartmentFactory.create(building__real_estate__housing_company__postal_code__code="00001") 50 | ApartmentFactory.create(building__real_estate__housing_company__postal_code__code="00002") 51 | ApartmentFactory.create(building__real_estate__housing_company__postal_code__code="00003") 52 | 53 | query = """ 54 | query { 55 | allApartments { 56 | ...Address 57 | } 58 | } 59 | 60 | fragment Address on ApartmentType { 61 | building { 62 | realEstate { 63 | housingCompany { 64 | postalCode { 65 | code 66 | } 67 | } 68 | } 69 | } 70 | } 71 | """ 72 | 73 | response = graphql_client(query) 74 | assert response.no_errors, response.errors 75 | 76 | # 1 query for fetching apartments and related buildings, real estates, housing companies, and postal codes 77 | assert response.queries.count == 1, response.queries.log 78 | 79 | assert response.queries[0] == has( 80 | 'FROM "app_apartment"', 81 | 'INNER JOIN "app_building"', 82 | 'INNER JOIN "app_realestate"', 83 | 'INNER JOIN "app_housingcompany"', 84 | 'INNER JOIN "app_postalcode"', 85 | ) 86 | 87 | assert response.content == [ 88 | {"building": {"realEstate": {"housingCompany": {"postalCode": {"code": "00001"}}}}}, 89 | {"building": {"realEstate": {"housingCompany": {"postalCode": {"code": "00002"}}}}}, 90 | {"building": {"realEstate": {"housingCompany": {"postalCode": {"code": "00003"}}}}}, 91 | ] 92 | 93 | 94 | def test_fragment_spread__one_to_many_relations(graphql_client): 95 | ApartmentFactory.create(sales__ownerships__owner__name="1") 96 | 
ApartmentFactory.create(sales__ownerships__owner__name="2") 97 | ApartmentFactory.create(sales__ownerships__owner__name="3") 98 | 99 | query = """ 100 | query { 101 | allApartments { 102 | ...Sales 103 | } 104 | } 105 | 106 | fragment Sales on ApartmentType { 107 | sales { 108 | ownerships { 109 | owner { 110 | name 111 | } 112 | } 113 | } 114 | } 115 | """ 116 | 117 | response = graphql_client(query) 118 | assert response.no_errors, response.errors 119 | 120 | # 1 query for fetching apartments. 121 | # 1 query for fetching sales. 122 | # 1 query for fetching ownerships and related owners. 123 | assert response.queries.count == 3, response.queries.log 124 | 125 | assert response.queries[0] == has( 126 | 'FROM "app_apartment"', 127 | ) 128 | assert response.queries[1] == has( 129 | 'FROM "app_sale"', 130 | ) 131 | assert response.queries[2] == has( 132 | 'FROM "app_ownership"', 133 | 'INNER JOIN "app_owner"', 134 | ) 135 | 136 | assert response.content == [ 137 | {"sales": [{"ownerships": [{"owner": {"name": "1"}}]}]}, 138 | {"sales": [{"ownerships": [{"owner": {"name": "2"}}]}]}, 139 | {"sales": [{"ownerships": [{"owner": {"name": "3"}}]}]}, 140 | ] 141 | 142 | 143 | def test_fragment_spread__same_relation_in_multiple_fragments(graphql_client): 144 | ApartmentFactory.create( 145 | sales__purchase_date="2020-01-01", 146 | sales__purchase_price=100, 147 | sales__ownerships__percentage=10, 148 | sales__ownerships__owner__name="foo", 149 | ) 150 | ApartmentFactory.create( 151 | sales__purchase_date="2020-01-02", 152 | sales__purchase_price=200, 153 | sales__ownerships__percentage=20, 154 | sales__ownerships__owner__name="bar", 155 | ) 156 | ApartmentFactory.create( 157 | sales__purchase_date="2020-01-03", 158 | sales__purchase_price=300, 159 | sales__ownerships__percentage=30, 160 | sales__ownerships__owner__name="baz", 161 | ) 162 | 163 | query = """ 164 | query { 165 | allApartments { 166 | surfaceArea 167 | ...PurchaseData 168 | ...SalesData 169 | } 170 | } 171 | 
172 | fragment PurchaseData on ApartmentType { 173 | sales { 174 | purchaseDate 175 | ownerships { 176 | percentage 177 | owner { 178 | name 179 | } 180 | } 181 | } 182 | } 183 | 184 | fragment SalesData on ApartmentType { 185 | sales { 186 | purchasePrice 187 | apartment { 188 | sharesStart 189 | sharesEnd 190 | } 191 | } 192 | } 193 | """ 194 | 195 | response = graphql_client(query) 196 | assert response.no_errors, response.errors 197 | 198 | # 1 query for fetching apartments 199 | # 1 query for fetching sales and apartments 200 | # 1 query for fetching ownerships and owners 201 | assert response.queries.count == 3, response.queries.log 202 | 203 | assert response.queries[0] == has( 204 | 'FROM "app_apartment"', 205 | ) 206 | assert response.queries[1] == has( 207 | "purchase_date", 208 | "purchase_price", 209 | 'FROM "app_sale"', 210 | 'INNER JOIN "app_apartment"', 211 | ) 212 | assert response.queries[2] == has( 213 | 'FROM "app_ownership"', 214 | 'INNER JOIN "app_owner"', 215 | ) 216 | 217 | 218 | def test_inline_fragment(graphql_client): 219 | DeveloperFactory.create(name="1", housingcompany_set__name="1") 220 | PropertyManagerFactory.create(name="1", housing_companies__name="1") 221 | OwnerFactory.create(name="1", ownerships__percentage=100) 222 | 223 | query = """ 224 | query { 225 | allPeople { 226 | ... on DeveloperType { 227 | name 228 | housingcompanySet { 229 | name 230 | } 231 | __typename 232 | } 233 | ... on PropertyManagerType { 234 | name 235 | housingCompanies { 236 | name 237 | } 238 | __typename 239 | } 240 | ... on OwnerType { 241 | name 242 | ownerships { 243 | percentage 244 | } 245 | __typename 246 | } 247 | } 248 | } 249 | """ 250 | 251 | response = graphql_client(query) 252 | assert response.no_errors, response.errors 253 | 254 | # 1 query for fetching developers. 255 | # 1 query for fetching housing companies for developers. 256 | # 1 query for fetching property managers. 
257 | # 1 query for fetching housing companies for property managers. 258 | # 1 query for fetching owners. 259 | # 1 query for fetching ownerships for owners. 260 | assert response.queries.count == 6, response.queries.log 261 | 262 | assert response.queries[0] == has( 263 | 'FROM "app_developer"', 264 | ) 265 | assert response.queries[1] == has( 266 | 'FROM "app_housingcompany"', 267 | ) 268 | assert response.queries[2] == has( 269 | 'FROM "app_propertymanager"', 270 | ) 271 | assert response.queries[3] == has( 272 | 'FROM "app_housingcompany"', 273 | ) 274 | assert response.queries[4] == has( 275 | 'FROM "app_owner"', 276 | ) 277 | assert response.queries[5] == has( 278 | 'FROM "app_ownership"', 279 | ) 280 | -------------------------------------------------------------------------------- /tests/test_relay_node.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from graphql_relay import to_global_id 3 | 4 | from example_project.app.types import ApartmentNode, BuildingNode 5 | from tests.factories import ApartmentFactory, BuildingFactory 6 | from tests.helpers import has 7 | 8 | pytestmark = [ 9 | pytest.mark.django_db, 10 | ] 11 | 12 | 13 | def test_relay__global_node(graphql_client): 14 | apartment = ApartmentFactory.create(building__name="1") 15 | global_id = to_global_id(str(ApartmentNode), apartment.pk) 16 | 17 | query = """ 18 | query { 19 | node(id: "%s") { 20 | ... 
on ApartmentNode { 21 | building { 22 | name 23 | } 24 | } 25 | } 26 | } 27 | """ % (global_id,) 28 | 29 | response = graphql_client(query) 30 | assert response.no_errors, response.errors 31 | 32 | # 1 query for fetching apartment and related buildings 33 | assert response.queries.count == 1, response.queries.log 34 | 35 | assert response.queries[0] == has( 36 | 'FROM "app_apartment"', 37 | 'INNER JOIN "app_building"', 38 | ) 39 | 40 | assert response.content == {"building": {"name": "1"}} 41 | 42 | 43 | def test_relay__global_node_without_fragment(graphql_client): 44 | apartment = ApartmentFactory.create(building__name="1") 45 | global_id = to_global_id(str(ApartmentNode), apartment.pk) 46 | 47 | query = """ 48 | query { 49 | node(id: "%s") { 50 | __typename 51 | id 52 | } 53 | } 54 | """ % (global_id,) 55 | 56 | response = graphql_client(query) 57 | 58 | assert response.no_errors, response.errors 59 | assert response.queries.count == 1, response.queries.log 60 | assert response.content == {"__typename": "ApartmentNode", "id": global_id} 61 | 62 | 63 | def test_relay__node(graphql_client): 64 | apartment = ApartmentFactory.create(building__name="1") 65 | global_id = to_global_id(str(ApartmentNode), apartment.pk) 66 | 67 | query = """ 68 | query { 69 | apartment(id: "%s") { 70 | building { 71 | name 72 | } 73 | } 74 | } 75 | """ % (global_id,) 76 | 77 | response = graphql_client(query) 78 | assert response.no_errors, response.errors 79 | 80 | # 1 query for fetching apartment and related buildings 81 | assert response.queries.count == 1, response.queries.log 82 | 83 | assert response.queries[0] == has( 84 | 'FROM "app_apartment"', 85 | 'INNER JOIN "app_building"', 86 | ) 87 | 88 | assert response.content == {"building": {"name": "1"}} 89 | 90 | 91 | def test_relay__node__deep(graphql_client): 92 | apartment = ApartmentFactory.create(sales__ownerships__owner__name="1") 93 | global_id = to_global_id(str(ApartmentNode), apartment.pk) 94 | 95 | query = """ 96 | query { 
97 | apartment(id: "%s") { 98 | sales { 99 | ownerships { 100 | owner { 101 | name 102 | } 103 | } 104 | } 105 | } 106 | } 107 | """ % (global_id,) 108 | 109 | response = graphql_client(query) 110 | assert response.no_errors, response.errors 111 | 112 | # 1 query for fetching the apartment. 113 | # 1 query for fetching sales. 114 | # 1 query for fetching ownerships and related owners. 115 | assert response.queries.count == 3, response.queries.log 116 | 117 | assert response.queries[0] == has( 118 | 'FROM "app_apartment"', 119 | ) 120 | assert response.queries[1] == has( 121 | 'FROM "app_sale"', 122 | ) 123 | assert response.queries[2] == has( 124 | 'FROM "app_ownership"', 125 | 'INNER JOIN "app_owner"', 126 | ) 127 | 128 | assert response.content == { 129 | "sales": [ 130 | { 131 | "ownerships": [ 132 | {"owner": {"name": "1"}}, 133 | ], 134 | }, 135 | ], 136 | } 137 | 138 | 139 | def test_relay__node__doesnt_mess_up_filterset(graphql_client): 140 | building = BuildingFactory.create() 141 | global_id = to_global_id(str(BuildingNode), building.pk) 142 | 143 | # Test that for nodes, we don't run the `filterset_class` filters. 144 | # This would result in an error, since the ID for nodes is a global ID, and not a primary key. 
145 | query = """ 146 | query { 147 | building(id: "%s") { 148 | id 149 | } 150 | } 151 | """ % (global_id,) 152 | 153 | response = graphql_client(query) 154 | assert response.no_errors, response.errors 155 | 156 | # 1 query for fetching Buildings 157 | assert response.queries.count == 1, response.queries.log 158 | 159 | assert response.queries[0] == has( 160 | 'FROM "app_building"', 161 | ) 162 | 163 | assert response.content == {"id": global_id} 164 | 165 | 166 | def test_relay__node__doesnt_mess_up_filterset__nested_filtering(graphql_client): 167 | building = BuildingFactory.create() 168 | ApartmentFactory.create(street_address="1", building=building) 169 | ApartmentFactory.create(street_address="2", building=building) 170 | 171 | global_id = to_global_id(str(BuildingNode), building.pk) 172 | 173 | # Check that for nested connections in relay nodes, we still run the filters. 174 | query = """ 175 | query { 176 | building(id: "%s") { 177 | apartments(streetAddress:"1") { 178 | edges { 179 | node { 180 | streetAddress 181 | } 182 | } 183 | } 184 | } 185 | } 186 | """ % (global_id,) 187 | 188 | response = graphql_client(query) 189 | assert response.no_errors, response.errors 190 | 191 | # 1 query for fetching buildings. 192 | # 1 query for fetching apartments. 
193 | assert response.queries.count == 2, response.queries.log 194 | 195 | assert response.queries[0] == has( 196 | 'FROM "app_building"', 197 | ) 198 | assert response.queries[1] == has( 199 | 'FROM "app_apartment"', 200 | ) 201 | 202 | # Check that the nested filter is actually applied 203 | assert response.content == {"apartments": {"edges": [{"node": {"streetAddress": "1"}}]}} 204 | -------------------------------------------------------------------------------- /tests/test_selections.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import contextlib 4 | from typing import TYPE_CHECKING 5 | from unittest.mock import patch 6 | 7 | import pytest 8 | 9 | from query_optimizer.selections import get_field_selections 10 | from tests.factories import HousingCompanyFactory 11 | 12 | if TYPE_CHECKING: 13 | from query_optimizer.typing import GQLInfo 14 | 15 | pytestmark = [ 16 | pytest.mark.django_db, 17 | ] 18 | 19 | 20 | @contextlib.contextmanager 21 | def mock_selections(): 22 | selections = [] 23 | 24 | def tracker(info: GQLInfo) -> list[str]: 25 | selections.extend(get_field_selections(info)) 26 | return selections 27 | 28 | path = "example_project.app.schema.get_field_selections" 29 | with patch(path, side_effect=tracker): 30 | yield selections 31 | 32 | 33 | def test_get_field_selections__simple(graphql_client): 34 | HousingCompanyFactory.create(name="foo") 35 | 36 | query = """ 37 | query { 38 | housingCompanyByName(name:"foo") { 39 | pk 40 | } 41 | } 42 | """ 43 | 44 | with mock_selections() as selections: 45 | response = graphql_client(query) 46 | 47 | assert response.no_errors, response.errors 48 | 49 | assert selections == ["pk"] 50 | 51 | 52 | def test_get_field_selections__one_to_one_related(graphql_client): 53 | HousingCompanyFactory.create(name="foo") 54 | 55 | query = """ 56 | query { 57 | housingCompanyByName(name:"foo") { 58 | pk 59 | postalCode { 60 | code 61 | } 62 | } 
63 | } 64 | """ 65 | 66 | with mock_selections() as selections: 67 | response = graphql_client(query) 68 | 69 | assert response.no_errors, response.errors 70 | 71 | assert selections == ["pk", {"postal_code": ["code"]}] 72 | 73 | 74 | def test_get_field_selections__one_to_many_related(graphql_client): 75 | HousingCompanyFactory.create(name="foo") 76 | 77 | query = """ 78 | query { 79 | housingCompanyByName(name:"foo") { 80 | pk 81 | realEstates { 82 | pk 83 | } 84 | } 85 | } 86 | """ 87 | 88 | with mock_selections() as selections: 89 | response = graphql_client(query) 90 | 91 | assert response.no_errors, response.errors 92 | 93 | assert selections == ["pk", {"real_estates": ["pk"]}] 94 | 95 | 96 | def test_get_field_selections__many_to_many_related(graphql_client): 97 | HousingCompanyFactory.create(name="foo") 98 | 99 | query = """ 100 | query { 101 | housingCompanyByName(name:"foo") { 102 | pk 103 | developers { 104 | pk 105 | } 106 | } 107 | } 108 | """ 109 | 110 | with mock_selections() as selections: 111 | response = graphql_client(query) 112 | 113 | assert response.no_errors, response.errors 114 | 115 | assert selections == ["pk", {"developers": ["pk"]}] 116 | 117 | 118 | def test_get_field_selections__plain_object_type(graphql_client): 119 | HousingCompanyFactory.create(name="foo") 120 | 121 | query = """ 122 | query { 123 | plain { 124 | foo 125 | } 126 | } 127 | """ 128 | 129 | with mock_selections() as selections: 130 | response = graphql_client(query) 131 | 132 | assert response.no_errors, response.errors 133 | 134 | assert selections == ["foo"] 135 | 136 | 137 | def test_get_field_selections__plain_object_type__nested(graphql_client): 138 | HousingCompanyFactory.create(name="foo") 139 | 140 | query = """ 141 | query { 142 | plain { 143 | foo 144 | bar { 145 | x 146 | } 147 | } 148 | } 149 | """ 150 | 151 | with mock_selections() as selections: 152 | response = graphql_client(query) 153 | 154 | assert response.no_errors, response.errors 155 | 156 | assert 
selections == ["foo", {"bar": ["x"]}]


# ---------------------------------------------------------------------------
# tests/test_utils.py
# ---------------------------------------------------------------------------

import pytest
from django.db import models

from example_project.app.models import Example
from query_optimizer.settings import optimizer_settings
from query_optimizer.typing import NamedTuple, Optional
from query_optimizer.utils import calculate_queryset_slice, calculate_slice_for_queryset, swappable_by_subclassing
from tests.factories.example import ExampleFactory
from tests.helpers import parametrize_helper


class PaginationInput(NamedTuple):
    """Keyword arguments accepted by the slice-calculation helpers."""

    first: Optional[int] = None
    last: Optional[int] = None
    after: Optional[int] = None
    before: Optional[int] = None
    size: int = 100


class Params(NamedTuple):
    """One test case: pagination input plus the expected slice bounds."""

    pagination_input: PaginationInput
    start: int
    stop: int


# Shared by both slice tests below; the keys double as the pytest test IDs.
TEST_CASES = {
    "default": Params(PaginationInput(), start=0, stop=100),
    "after": Params(PaginationInput(after=1), start=1, stop=100),
    "before": Params(PaginationInput(before=99), start=0, stop=99),
    "first": Params(PaginationInput(first=10), start=0, stop=10),
    "last": Params(PaginationInput(last=10), start=90, stop=100),
    "after_before": Params(PaginationInput(after=1, before=99), start=1, stop=99),
    "first_last": Params(PaginationInput(first=10, last=8), start=2, stop=10),
    "after_before_first_last": Params(PaginationInput(after=1, before=99, first=10, last=8), start=3, stop=11),
    "after_bigger_than_size": Params(PaginationInput(after=101), start=100, stop=100),
    "before_bigger_than_size": Params(PaginationInput(before=101), start=0, stop=100),
    "first_bigger_than_size": Params(PaginationInput(first=101), start=0, stop=100),
    "last_bigger_than_size": Params(PaginationInput(last=101), start=0, stop=100),
    "after_is_size": Params(PaginationInput(after=100), start=100, stop=100),
    "before_is_size": Params(PaginationInput(before=100), start=0, stop=100),
    "first_is_size": Params(PaginationInput(first=100), start=0, stop=100),
    "last_is_size": Params(PaginationInput(last=100), start=0, stop=100),
    "first_bigger_than_after_before": Params(PaginationInput(after=10, before=20, first=20), start=10, stop=20),
    "last_bigger_than_after_before": Params(PaginationInput(after=10, before=20, last=20), start=10, stop=20),
}


@pytest.mark.parametrize(**parametrize_helper(TEST_CASES))
def test_calculate_queryset_slice(pagination_input: PaginationInput, start: int, stop: int) -> None:
    """The pure-python slice calculation produces the expected bounds."""
    result = calculate_queryset_slice(**pagination_input._asdict())
    assert result.start == start
    assert result.stop == stop


@pytest.mark.django_db()
@pytest.mark.parametrize(**parametrize_helper(TEST_CASES))
def test_calculate_slice_for_queryset(pagination_input: PaginationInput, start: int, stop: int) -> None:
    """The database-side slice calculation agrees with the pure-python one."""
    ExampleFactory.create()

    queryset = Example.objects.all()
    # The optimizer expects the prefetch-count annotation to already be present.
    queryset = queryset.annotate(**{optimizer_settings.PREFETCH_COUNT_KEY: models.Value(pagination_input.size)})
    queryset = calculate_slice_for_queryset(queryset, **pagination_input._asdict())

    bounds = (
        queryset.annotate(
            start=models.F(optimizer_settings.PREFETCH_SLICE_START),
            stop=models.F(optimizer_settings.PREFETCH_SLICE_STOP),
        )
        .values("start", "stop")
        .first()
    )

    assert bounds == {"start": start, "stop": stop}


def test_swappable_by_subclassing():
    """`swappable_by_subclassing` makes `A(...)` construct the latest direct subclass."""

    @swappable_by_subclassing
    class A:
        def __init__(self, arg: int = 1) -> None:
            self.one = arg

    # No subclass defined yet -> plain A.
    instance = A()
    assert type(instance) is A
    assert instance.one == 1

    class B(A):
        def __init__(self, arg: int = 1) -> None:
            super().__init__(arg)
            self.two = arg * 2

    # Calling A() now constructs the most recently declared direct subclass, B.
    swapped = A(2)
    assert type(swapped) is B
    assert swapped.one == 2
    assert swapped.two == 4

    class C(A):
        def __init__(self, arg: int = 1, second_arg: int = 2) -> None:
            super().__init__(arg)
            self.three = second_arg * 3

    # C replaces B as the swap target, including its extended signature.
    latest = A(3, 4)
    assert type(latest) is C
    assert latest.one == 3
    assert not hasattr(latest, "two")
    assert latest.three == 12

    class D(B): ...

    # D subclasses B, not A directly, so the swap target stays C.
    unchanged = A()
    assert type(unchanged) is C  # Only direct subclasses are swapped.
# ---------------------------------------------------------------------------
# tests/test_validators.py
# ---------------------------------------------------------------------------
# Tests for `validate_pagination_args`: valid argument combinations normalize
# to a `PaginationArgs` tuple, invalid ones raise ValueError with an exact
# message.

import pytest
from graphql_relay import offset_to_cursor

from query_optimizer.typing import Any, NamedTuple, Optional
from query_optimizer.validators import PaginationArgs, validate_pagination_args
from tests.helpers import parametrize_helper


class PaginationInput(NamedTuple):
    """Raw (unvalidated) pagination arguments; typed `Any` so that wrong-typed
    values can be passed through to the validator."""

    first: Any = None
    last: Any = None
    offset: Any = None
    after: Any = None  # relay cursor string (see `offset_to_cursor` in the cases)
    before: Any = None  # relay cursor string
    max_limit: Any = None


class Params(NamedTuple):
    """Expected validator result, or the expected ValueError message."""

    pagination_input: PaginationInput
    output: PaginationArgs
    errors: Optional[str]  # None when validation is expected to succeed


@pytest.mark.parametrize(
    **parametrize_helper(
        {
            # --- valid inputs -> normalized PaginationArgs ------------------
            # NOTE(review): the case name "first" is misleading — the input is
            # empty (all defaults); consider renaming to "defaults".
            "first": Params(
                pagination_input=PaginationInput(),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors=None,
            ),
            "last": Params(
                pagination_input=PaginationInput(last=1),
                output=PaginationArgs(after=None, before=None, first=None, last=1, size=None),
                errors=None,
            ),
            # `offset` is translated into an equivalent `after` value.
            "offset": Params(
                pagination_input=PaginationInput(offset=1),
                output=PaginationArgs(after=1, before=None, first=None, last=None, size=None),
                errors=None,
            ),
            "after": Params(
                pagination_input=PaginationInput(after=offset_to_cursor(0)),
                # Add 1 to after to make it exclusive in slicing.
                output=PaginationArgs(after=1, before=None, first=None, last=None, size=None),
                errors=None,
            ),
            "before": Params(
                pagination_input=PaginationInput(before=offset_to_cursor(0)),
                output=PaginationArgs(after=None, before=0, first=None, last=None, size=None),
                errors=None,
            ),
            # A max limit alone acts as an implicit `first` and caps the size.
            "max limit": Params(
                pagination_input=PaginationInput(max_limit=1),
                output=PaginationArgs(after=None, before=None, first=1, last=None, size=1),
                errors=None,
            ),
            # --- invalid values -> ValueError with an exact message ---------
            "first zero": Params(
                pagination_input=PaginationInput(first=0),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors="Argument 'first' must be a positive integer.",
            ),
            "last zero": Params(
                pagination_input=PaginationInput(last=0),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors="Argument 'last' must be a positive integer.",
            ),
            "first negative": Params(
                pagination_input=PaginationInput(first=-1),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors="Argument 'first' must be a positive integer.",
            ),
            "last negative": Params(
                pagination_input=PaginationInput(last=-1),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors="Argument 'last' must be a positive integer.",
            ),
            "first exceeds max limit": Params(
                pagination_input=PaginationInput(first=2, max_limit=1),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors="Requesting first 2 records exceeds the limit of 1.",
            ),
            "last exceeds max limit": Params(
                pagination_input=PaginationInput(last=2, max_limit=1),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors="Requesting last 2 records exceeds the limit of 1.",
            ),
            # offset=0 is valid and equivalent to no offset at all.
            "offset zero": Params(
                pagination_input=PaginationInput(offset=0),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors=None,
            ),
            "after negative": Params(
                pagination_input=PaginationInput(after=offset_to_cursor(-1)),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors="The node pointed with `after` does not exist.",
            ),
            "before negative": Params(
                pagination_input=PaginationInput(before=offset_to_cursor(-1)),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors="The node pointed with `before` does not exist.",
            ),
            "after before": Params(
                pagination_input=PaginationInput(after=offset_to_cursor(1), before=offset_to_cursor(0)),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors="The node pointed with `after` must be before the node pointed with `before`.",
            ),
            # --- conflicting arguments --------------------------------------
            "offset after": Params(
                pagination_input=PaginationInput(offset=1, after=offset_to_cursor(0)),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors="Can only use either `offset` or `before`/`after` for pagination.",
            ),
            "offset before": Params(
                pagination_input=PaginationInput(offset=1, before=offset_to_cursor(0)),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors="Can only use either `offset` or `before`/`after` for pagination.",
            ),
            # --- wrong types -------------------------------------------------
            "first not int": Params(
                pagination_input=PaginationInput(first="0"),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors="Argument 'first' must be a positive integer.",
            ),
            "last not int": Params(
                pagination_input=PaginationInput(last="0"),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors="Argument 'last' must be a positive integer.",
            ),
            "offset not int": Params(
                pagination_input=PaginationInput(offset="0"),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors="Argument `offset` must be a positive integer.",
            ),
            # A non-int max limit is ignored rather than rejected.
            "max limit not int": Params(
                pagination_input=PaginationInput(max_limit="0"),
                output=PaginationArgs(after=None, before=None, first=None, last=None, size=None),
                errors=None,
            ),
        }
    ),
)
def test_validate_pagination_args(pagination_input, output, errors):
    """Validation either returns exactly `output` or raises ValueError(`errors`)."""
    try:
        args = validate_pagination_args(**pagination_input._asdict())
    except ValueError as error:
        # An exception is only acceptable when the case expects one, and the
        # full message must match exactly.
        if errors is None:
            pytest.fail(f"Unexpected error: {error}")
        assert str(error) == errors
    else:
        if errors is not None:
            pytest.fail(f"Expected error: {errors}")
        assert args == output