├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── config.yml └── workflows │ └── ci.yaml ├── .gitignore ├── .pre-commit-config.yaml ├── .pylintrc ├── .vscode ├── launch.json ├── settings.json └── tasks.json ├── CHANGELOG.md ├── LICENSE ├── README.md ├── docker-compose.yml ├── docs ├── Concept.md ├── DatabaseClient.md ├── Logging.md ├── Migrations.md ├── Models.md ├── Query.md └── RelationshipProperty.md ├── poetry.lock ├── pyneo4j_ogm ├── __init__.py ├── core │ ├── base.py │ ├── client.py │ ├── node.py │ └── relationship.py ├── exceptions.py ├── fields │ ├── property_options.py │ ├── relationship_property.py │ └── settings.py ├── logger.py ├── migrations │ ├── __init__.py │ ├── actions │ │ ├── create.py │ │ ├── down.py │ │ ├── init.py │ │ ├── status.py │ │ └── up.py │ ├── cli.py │ └── utils │ │ ├── client.py │ │ ├── defaults.py │ │ ├── migration.py │ │ └── models.py ├── pydantic_utils.py └── queries │ ├── operators.py │ ├── query_builder.py │ ├── types.py │ └── validators.py ├── pyproject.toml ├── pyrightconfig.json ├── templates └── CHANGELOG.md.j2 └── tests ├── __init__.py ├── core ├── test_base.py ├── test_client.py ├── test_node.py └── test_relationship.py ├── fields ├── test_property_options.py ├── test_relationship_property.py └── test_settings.py ├── fixtures ├── db_setup.py ├── migrations.py ├── models │ ├── models_top.py │ ├── nested │ │ ├── deeply_nested │ │ │ ├── model_deeply_nested.py │ │ │ └── other_classes.py │ │ ├── model_nested.py │ │ └── no_models.py │ └── other_classes.py ├── operators_builder.py └── query_builder.py ├── migrations └── actions │ ├── test_create.py │ ├── test_down.py │ ├── test_init.py │ ├── test_status.py │ └── test_up.py ├── queries ├── test_operators.py ├── test_query_builder.py └── test_validators.py └── utils ├── __init__.py └── string_utils.py /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | ### Subject of the issue 11 | 12 | Describe your issue here. 13 | 14 | ### Your environment 15 | 16 | * Version of pyneo4j-ogm 17 | * Version of pydantic 18 | * Version of python 19 | 20 | ### Steps to reproduce 21 | 22 | Tell us how to reproduce this issue. 23 | 24 | ```python 25 | Please add a code snippet here, that reproduces the problem completely. 
26 | ``` 27 | 28 | ### Expected behaviour 29 | 30 | Tell us what should happen 31 | 32 | ### Actual behaviour 33 | 34 | Tell us what happens instead 35 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [main, develop] 6 | paths: 7 | - "pyneo4j_ogm/**" 8 | - "tests/**" 9 | pull_request: 10 | branches: [main, develop] 11 | 12 | jobs: 13 | lint: 14 | runs-on: ubuntu-latest 15 | env: 16 | POETRY_VERSION: "1.5.1" 17 | strategy: 18 | fail-fast: false 19 | matrix: 20 | python-version: ["3.10", "3.11"] 21 | 22 | steps: 23 | - uses: actions/checkout@v3 24 | 25 | - name: Set up Python ${{ matrix.python-version }} 26 | uses: actions/setup-python@v4 27 | with: 28 | python-version: ${{ matrix.python-version }} 29 | 30 | - name: Cache Poetry 31 | uses: actions/cache@v3 32 | with: 33 | path: ~/.cache/pypoetry 34 | key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }} 35 | restore-keys: | 36 | ${{ runner.os }}-poetry- 37 | 38 | - name: View python version 39 | run: python --version 40 | 41 | - name: Install Python Poetry 42 | uses: abatilo/actions-poetry@v2.3.0 43 | 44 | - name: Configure poetry 45 | shell: bash 46 | run: poetry config virtualenvs.in-project true 47 | 48 | - name: View poetry version 49 | run: poetry --version 50 | 51 | - name: Install dependencies 52 | run: poetry install 53 | 54 | - name: Run linter 55 | run: poetry run pylint --rcfile=.pylintrc pyneo4j_ogm tests 56 | 57 | type-check: 58 | runs-on: ubuntu-latest 59 | env: 60 | POETRY_VERSION: "1.5.1" 61 | strategy: 62 | fail-fast: false 63 | matrix: 64 | python-version: ["3.10", "3.11", "3.12"] 65 | 66 | steps: 67 | - uses: actions/checkout@v3 68 | 69 | - name: Set up Python ${{ matrix.python-version }} 70 | uses: actions/setup-python@v4 71 | with: 72 | python-version: ${{ matrix.python-version }} 73 | 74 | - name: Cache Poetry 75 | uses: actions/cache@v3 76 | with: 77 | path: ~/.cache/pypoetry 78 | key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }} 79 | restore-keys: | 80 | ${{ runner.os }}-poetry- 81 | 82 | - name: View python version 83 | run: python --version 84 | 85 | - name: Install Python Poetry 86 | uses: abatilo/actions-poetry@v2.3.0 87 | 88 | - name: Configure poetry 89 | shell: bash 90 | run: poetry config virtualenvs.in-project true 91 | 92 | - name: View poetry version 93 | run: poetry --version 94 | 95 | - name: Install dependencies 96 | run: poetry install 97 | 98 | - name: Run type-checker 99 | run: poetry run pyright pyneo4j_ogm tests 100 | 101 | test: 102 | runs-on: ubuntu-latest 103 | services: 104 | neo4j: 105 | image: neo4j:latest 106 | env: 107 | NEO4J_AUTH: ${{ secrets.NEO4J_AUTH }} 108 | ports: 109 | - 7687:7687 110 | env: 111 | POETRY_VERSION: "1.5.1" 112 | strategy: 113 | fail-fast: false 114 | matrix: 115 | python-version: ["3.10", "3.11", "3.12"] 116 | pydantic-version: ["1.10.9", "^2"] 117 | 118 | steps: 119 | - uses: actions/checkout@v3 120 | 121 | - name: Set up Python ${{ matrix.python-version }} 122 | uses: actions/setup-python@v4 123 | with: 124 | python-version: ${{ matrix.python-version }} 125 | 126 | - name: Cache Poetry 127 | uses: actions/cache@v3 128 | with: 
129 | path: ~/.cache/pypoetry 130 | key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }} 131 | restore-keys: | 132 | ${{ runner.os }}-poetry- 133 | 134 | - name: View python version 135 | run: python --version 136 | 137 | - name: Install Python Poetry 138 | uses: abatilo/actions-poetry@v2.3.0 139 | 140 | - name: Configure poetry 141 | shell: bash 142 | run: poetry config virtualenvs.in-project true 143 | 144 | - name: View poetry version 145 | run: poetry --version 146 | 147 | - name: Install dependencies 148 | run: | 149 | poetry install 150 | poetry add pydantic@${{ matrix.pydantic-version }} 151 | 152 | - name: View pydantic version 153 | run: poetry show pydantic 154 | 155 | - name: Run tests 156 | run: poetry run pytest tests --asyncio-mode=auto --cov=pyneo4j_ogm -W ignore::DeprecationWarning 157 | 158 | release: 159 | runs-on: ubuntu-latest 160 | needs: [lint, type-check, test] 161 | if: | 162 | github.event_name == 'push' && 163 | github.ref == 'refs/heads/main' && 164 | !contains ( github.event.head_commit.message, 'chore(release)' ) 165 | concurrency: release 166 | environment: 167 | name: release 168 | url: https://pypi.org/p/pyneo4j-ogm 169 | permissions: 170 | id-token: write 171 | contents: write 172 | 173 | steps: 174 | - uses: actions/checkout@v3 175 | with: 176 | fetch-depth: 0 177 | ref: ${{ github.ref_name }} 178 | 179 | - name: Python Semantic Release 180 | uses: python-semantic-release/python-semantic-release@master 181 | with: 182 | github_token: ${{ secrets.GITHUB_TOKEN }} 183 | 184 | - name: Publish package distributions to PyPI 185 | uses: pypa/gh-action-pypi-publish@release/v1 186 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
160 | #.idea/ 161 | 162 | # Docker volumes 163 | .neo4j_data 164 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pycqa/isort 3 | rev: 5.12.0 4 | hooks: 5 | - id: isort 6 | args: [--profile, black] 7 | stages: [pre-commit] 8 | 9 | - repo: https://github.com/psf/black 10 | rev: 23.10.0 11 | hooks: 12 | - id: black 13 | args: [--config, pyproject.toml] 14 | stages: [pre-commit] 15 | 16 | - repo: https://github.com/commitizen-tools/commitizen 17 | rev: 3.12.0 18 | hooks: 19 | - id: commitizen 20 | stages: [commit-msg] 21 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | ignore=third_party 3 | persistent=no 4 | jobs=4 5 | unsafe-load-any-extension=no 6 | 7 | 8 | [MESSAGES CONTROL] 9 | disable=abstract-method, 10 | inconsistent-quotes, 11 | protected-access, 12 | broad-exception-caught, 13 | ungrouped-imports, 14 | bare-except, 15 | no-self-argument, 16 | keyword-arg-before-vararg, 17 | bar-except, 18 | apply-builtin, 19 | arguments-differ, 20 | attribute-defined-outside-init, 21 | backtick, 22 | bad-option-value, 23 | basestring-builtin, 24 | buffer-builtin, 25 | c-extension-no-member, 26 | consider-using-enumerate, 27 | cmp-builtin, 28 | cmp-method, 29 | coerce-builtin, 30 | coerce-method, 31 | delslice-method, 32 | div-method, 33 | duplicate-code, 34 | eq-without-hash, 35 | execfile-builtin, 36 | file-builtin, 37 | filter-builtin-not-iterating, 38 | fixme, 39 | getslice-method, 40 | global-statement, 41 | hex-method, 42 | idiv-method, 43 | implicit-str-concat, 44 | import-error, 45 | import-self, 46 | import-star-module-level, 47 | inconsistent-return-statements, 48 | input-builtin, 49 | intern-builtin, 50 | invalid-str-codec, 51 | locally-disabled, 52 | long-builtin, 53 | long-suffix, 54 | map-builtin-not-iterating, 55 | misplaced-comparison-constant, 56 | missing-function-docstring, 57 | metaclass-assignment, 58 | next-method-called, 59 | next-method-defined, 60 | no-absolute-import, 61 | no-else-break, 62 | no-else-continue, 63 | no-else-raise, 64 | no-else-return, 65 | no-init, 66 | no-member, 67 | no-name-in-module, 68 | no-self-use, 69 | nonzero-method, 70 | oct-method, 71 | old-division, 72 | old-ne-operator, 73 | old-octal-literal, 74 | old-raise-syntax, 75 | parameter-unpacking, 76 | print-statement, 77 | raising-string, 78 | range-builtin-not-iterating, 79 | raw_input-builtin, 80 | rdiv-method, 81 | reduce-builtin, 82 | relative-import, 83 | reload-builtin, 84 | round-builtin, 85 | setslice-method, 86 | signature-differs, 87 | standarderror-builtin, 88 | suppressed-message, 89 | sys-max-int, 90 | too-few-public-methods, 91 | too-many-ancestors, 92 | too-many-arguments, 93 | too-many-boolean-expressions, 94 | too-many-branches, 95 | too-many-instance-attributes, 96 | too-many-locals, 97 | too-many-nested-blocks, 98 | too-many-public-methods, 99 | too-many-return-statements, 100 | too-many-statements, 101 | trailing-newlines, 102 | unichr-builtin, 103 | unicode-builtin, 104 | unnecessary-pass, 105 | unpacking-in-except, 106 | useless-else-on-loop, 107 | useless-object-inheritance, 108 | useless-suppression, 109 | using-cmp-argument, 110 | wrong-import-order, 111 | xrange-builtin, 112 | zip-builtin-not-iterating, 113 | import-outside-toplevel, 114 | 115 | 116 | 
[REPORTS] 117 | output-format=colorized 118 | reports=no 119 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) 120 | 121 | 122 | [BASIC] 123 | good-names=main,_ 124 | include-naming-hint=no 125 | property-classes=abc.abstractproperty,cached_property.cached_property,cached_property.threaded_cached_property,cached_property.cached_property_with_ttl,cached_property.threaded_cached_property_with_ttl 126 | function-rgx=^(?:(?PsetUp|tearDown|setUpModule|tearDownModule)|(?P_?[A-Z][a-zA-Z0-9]*)|(?P_?[a-z][a-z0-9_]*))$ 127 | variable-rgx=^[a-z][a-z0-9_]*$ 128 | const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$ 129 | attr-rgx=^_{0,2}[a-z][a-z0-9_]*$ 130 | argument-rgx=^[a-z][a-z0-9_]*$ 131 | class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-zA-Z][a-zA-Z0-9_]*)$ 132 | inlinevar-rgx=^[a-z][a-z0-9_]*$ 133 | class-rgx=^_?[A-Z][a-zA-Z0-9]*$ 134 | module-rgx=^(_?[a-z][a-z0-9_]*|__init__)$ 135 | method-rgx=(?x)^(?:(?P_[a-z0-9_]+__|runTest|setUp|tearDown|setUpTestCase|tearDownTestCase|setupSelf|tearDownClass|setUpClass|(test|assert)_*[A-Z0-9][a-zA-Z0-9_]*|next)|(?P_{0,2}[A-Z][a-zA-Z0-9_]*)|(?P_{0,2}[a-z][a-z0-9_]*))$ 136 | no-docstring-rgx=(__.*__|main|test.*|.*test|.*Test)$ 137 | docstring-min-length=10 138 | 139 | 140 | [TYPECHECK] 141 | contextmanager-decorators=contextlib.contextmanager,contextlib2.contextmanager 142 | ignore-mixin-members=yes 143 | ignored-classes=optparse.Values,thread._local,_thread._local 144 | 145 | 146 | [FORMAT] 147 | max-line-length=120 148 | ignore-long-lines=(?x)( 149 | ^\s*(\#\ )??$| 150 | ^\s*(from\s+\S+\s+)?import\s+.+$) 151 | single-line-if-stmt=yes 152 | max-module-lines=99999 153 | indent-string=' ' 154 | indent-after-paren=4 155 | 156 | 157 | [MISCELLANEOUS] 158 | notes=TODO 159 | 160 | 161 | [STRING] 162 | check-quote-consistency=yes 163 | 164 | 165 | [VARIABLES] 166 | init-import=no 167 | dummy-variables-rgx=^\*{0,2}(_$|unused_|dummy_) 168 | callbacks=cb_,_cb 169 | redefining-builtins-modules=six,six.moves,past.builtins,future.builtins,functools 170 | 171 | 172 | [LOGGING] 173 | logging-modules=logging,absl.logging,tensorflow.io.logging 174 | 175 | 176 | [SIMILARITIES] 177 | min-similarity-lines=4 178 | ignore-comments=yes 179 | ignore-docstrings=yes 180 | ignore-imports=no 181 | 182 | 183 | [SPELLING] 184 | spelling-store-unknown-words=no 185 | 186 | 187 | [IMPORTS] 188 | deprecated-modules=regsub, 189 | TERMIOS, 190 | Bastion, 191 | rexec, 192 | sets 193 | known-third-party=enchant, absl 194 | analyse-fallback-blocks=no 195 | 196 | 197 | [CLASSES] 198 | defining-attr-methods=__init__, 199 | __new__, 200 | setUp 201 | exclude-protected=_asdict, 202 | _fields, 203 | _replace, 204 | _source, 205 | _make 206 | valid-classmethod-first-arg=cls, 207 | class_ 208 | valid-metaclass-classmethod-first-arg=mcs 209 | 210 | 211 | [EXCEPTIONS] 212 | overgeneral-exceptions=builtins.StandardError, 213 | builtins.Exception, 214 | builtins.BaseException 215 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "Python: Current File", 9 | "type": "python", 10 | "request": "launch", 11 | "program": "${file}", 12 | "console": "integratedTerminal", 13 | "justMyCode": false 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.testing.pytestArgs": [ 3 | "tests", 4 | "--asyncio-mode=auto", 5 | "-W ignore::DeprecationWarning", 6 | ], 7 | "python.testing.unittestEnabled": false, 8 | "python.testing.pytestEnabled": true, 9 | "markdownlint.config": { 10 | "MD024": false, 11 | "MD039": false, 12 | "MD041": false 13 | }, 14 | "files.exclude": { 15 | "**/.git": true, 16 | "**/.svn": true, 17 | "**/.hg": true, 18 | "**/CVS": true, 19 | "**/.DS_Store": true, 20 | "**/Thumbs.db": true, 21 | "dist": true, 22 | ".pytest_cache": true, 23 | ".neo4j_data": true, 24 | ".coverage": true, 25 | ".venv": true 26 | }, 27 | "cSpell.words": [ 28 | "pyneo" 29 | ] 30 | } 31 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | // See https://go.microsoft.com/fwlink/?LinkId=733558 3 | // for the documentation about the tasks.json format 4 | "version": "2.0.0", 5 | "tasks": [ 6 | { 7 | "label": "Pytest", 8 | "type": "shell", 9 | "command": "poetry run pytest tests --asyncio-mode=auto --cov=pyneo4j_ogm --cov-report=html:tests/coverage -W ignore::DeprecationWarning", 10 | "group": { 11 | "kind": "test", 12 | "isDefault": true 13 | }, 14 | "presentation": { 15 | "reveal": "always", 16 | "panel": "shared", 17 | "showReuseMessage": true 18 | } 19 | }, 20 | { 21 | "label": "Pyright", 22 | "type": "shell", 23 | "command": "poetry run pyright pyneo4j_ogm tests", 24 | "group": { 25 | "kind": "none" 26 | }, 27 | "presentation": { 28 | "reveal": "always", 29 | "panel": "shared", 30 | "showReuseMessage": true 31 | } 32 | }, 33 | { 34 | "label": "Pylint", 35 | "type": "shell", 36 | "command": "poetry run pylint --rcfile=.pylintrc pyneo4j_ogm tests", 37 | "group": { 38 | "kind": "none" 39 | }, 40 | "presentation": { 41 | "reveal": "always", 42 | "panel": "shared", 43 | "showReuseMessage": true 44 | } 45 | } 46 | ] 47 | } 48 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Marc Troisner 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | 3 | services: 4 | neo4j: 5 | hostname: neo4j 6 | image: neo4j:latest 7 | restart: always 8 | ports: 9 | - 7687:7687 10 | - 7474:7474 11 | volumes: 12 | - .neo4j_data:/data 13 | environment: 14 | NEO4J_AUTH: neo4j/password 15 | -------------------------------------------------------------------------------- /docs/Concept.md: -------------------------------------------------------------------------------- 1 | ## Basic concepts 2 | 3 | As you might have guessed by now, `pyneo4j-ogm` is a library that allows you to interact with a Neo4j database using Python. It is designed to make your life as simple as possible, while still providing the most common operations and some more advanced features. 4 | 5 | But first, how does this even work!?! Well, the basic concept boils down to the following: 6 | 7 | - You define your models that represent your nodes and relationships inside the graph. 8 | - You use these models to do all sorts of things with your data. 9 | 10 | Of course, there is a lot more to it than that, but this is the basic idea. So let's take a closer look at the different parts of `pyneo4j-ogm` and how to use them. 11 | 12 | > **Note:** All of the examples in this documentation assume that you have already connected to a database and registered your models with the client like shown in the [`quickstart guide`](https://github.com/groc-prog/pyneo4j-ogm/blob/develop?tab=readme-ov-file#-quickstart). The models used in the following examples will build upon the ones defined there. If you are new to [`Neo4j`](https://neo4j.com/docs/) or [`Cypher`](https://neo4j.com/docs/cypher-manual/current/) in general, you should get a basic understanding of how to use them before continuing. 13 | 14 | ### A note on Pydantic version support 15 | 16 | As of version [`v0.3.0`](https://github.com/groc-prog/pyneo4j-ogm/blob/develop/CHANGELOG.md#whats-changed-in-v030-2023-11-30), pyneo4j-ogm now supports both `Pydantic 1.10+ and 2+`. All core features of pydantic should work, meaning full support for model serialization, validation and schema generation. 17 | 18 | Should you find any issues or run into any problems, feel free to open a issue! 19 | -------------------------------------------------------------------------------- /docs/DatabaseClient.md: -------------------------------------------------------------------------------- 1 | ## Database client 2 | 3 | This is where the magic happens! The `Pyneo4jClient` is the main entry point for interacting with the database. It handles all the heavy lifting for you and your models. Because of this, we have to always have at least one client initialized before doing anything else. 4 | 5 | ### Connecting to the database 6 | 7 | Before you can run any queries, you have to connect to a database. This is done by calling the `connect()` method of the `Pyneo4jClient` instance. The `connect()` method takes a few arguments: 8 | 9 | - `uri`: The connection URI to the database. 
10 | - `skip_constraints`: Whether the client should skip creating any constraints defined on models when registering them. Defaults to `False`. 11 | - `skip_indexes`: Whether the client should skip creating any indexes defined on models when registering them. Defaults to `False`. 12 | - `*args`: Additional arguments that are passed directly to Neo4j's `AsyncDriver.driver()` method. 13 | - `**kwargs`: Additional keyword arguments that are passed directly to Neo4j's `AsyncDriver.driver()` method. 14 | 15 | ```python 16 | from pyneo4j_ogm import Pyneo4jClient 17 | 18 | client = Pyneo4jClient() 19 | await client.connect(uri="", auth=("", ""), max_connection_pool_size=10, ...) 20 | 21 | ## Or chained right after the instantiation of the class 22 | client = await Pyneo4jClient().connect(uri="", auth=("", ""), max_connection_pool_size=10, ...) 23 | ``` 24 | 25 | After connecting the client, you will be able to run any cypher queries against the database. Should you try to run a query without connecting to a database first (it happens to the best of us), you will get a `NotConnectedToDatabase` exception. 26 | 27 | ### Closing an existing connection 28 | 29 | Connections can explicitly be closed by calling the `close()` method. This will close the connection to the database and free up any resources used by the client. Remember to always close your connections when you are done with them! 30 | 31 | ```python 32 | ## Do some heavy-duty work... 33 | 34 | ## Finally done, so we close the connection to the database. 35 | await client.close() 36 | ``` 37 | 38 | Once you closed the client, it will be seen as `disconnected` and if you try to run any further queries with it, you will get a `NotConnectedToDatabase` exception 39 | 40 | ### Registering models 41 | 42 | Models are a core feature of pyneo4j-ogm, and therefore you probably want to use some. But to work with them, they have to be registered with the client by calling the `register_models()` method and passing in your models as a list: 43 | 44 | ```python 45 | ## Create a new client instance and connect ... 46 | 47 | await client.register_models([Developer, Coffee, Consumed]) 48 | ``` 49 | 50 | or by providing the path to a directory holding all your models. The `register_models_from_directory()` method will automatically discover all models in the directory and all of it's subdirectories and register them: 51 | 52 | ```python 53 | ## Create a new client instance and connect ... 54 | 55 | await client.register_models_from_directory("path/to/models") 56 | ``` 57 | 58 | This is a crucial step, because if you don't register your models with the client, you won't be able to work with them in any way. Should you try to work with a model that has not been registered, you will get a `UnregisteredModel` exception. This exception also gets raised if a database model defines a relationship-property with other (unregistered) models as a target or relationship model and then runs a query with said relationship-property. 59 | 60 | If you have defined any indexes or constraints on your models, they will be created automatically when registering them. You can prevent this behavior by passing `skip_constraints=True` or `skip_indexes=True` to the `connect()` method. If you do this, you will have to create the indexes and constraints yourself. 61 | 62 | > **Note**: If you don't register your models with the client, you will still be able to run cypher queries directly with the client, but you will `lose automatic model resolution` from queries. 
This means that, instead of resolved models, the raw Neo4j query results are returned. 63 | 64 | ### Executing Cypher queries 65 | 66 | Models aren't the only things capable of running queries. The client can also be used to run queries, with some additional functionality to make your life easier. 67 | 68 | Node- and RelationshipModels provide many methods for commonly used cypher queries, but sometimes you might want to execute a custom cypher with more complex logic. For this purpose, the client instance provides a `cypher()` method that allows you to execute custom cypher queries. The `cypher()` method takes three arguments: 69 | 70 | - `query`: The cypher query to execute. 71 | - `parameters`: A dictionary containing the parameters to pass to the query. 72 | - `resolve_models`: Whether the client should try to resolve the models from the query results. Defaults to `True`. 73 | 74 | This method will always return a tuple containing a list of results and a list of variables returned by the query. Internally, the client uses the `.values()` method of the Neo4j driver to get the results of the query. 75 | 76 | > **Note:** If no models have been registered with the client and resolve_models is set to True, the client will not raise any exceptions but rather return the raw query results. 77 | 78 | Here is an example of how to execute a custom cypher query: 79 | 80 | ```python 81 | results, meta = await client.cypher( 82 | query="CREATE (d:Developer {uid: '553ac2c9-7b2d-404e-8271-40426ae80de0', name: 'John', age: 25}) RETURN d.name as developer_name, d.age", 83 | parameters={"name": "John Doe"}, 84 | resolve_models=False, ## Explicitly disable model resolution 85 | ) 86 | 87 | print(results) ## [["John", 25]] 88 | print(meta) ## ["developer_name", "d.age"] 89 | ``` 90 | 91 | ### Batching cypher queries 92 | 93 | We provide an easy way to batch multiple database queries together, regardless of whether you are using the client directly or via a model method. To do this you can use the `batch()` method, which has to be called with a asynchronous context manager like in the following example: 94 | 95 | ```python 96 | async with client.batch(): 97 | ## All queries executed inside the context manager will be batched into a single transaction 98 | ## and executed once the context manager exits. If any of the queries fail, the whole transaction 99 | ## will be rolled back. 100 | await client.cypher( 101 | query="CREATE (d:Developer {uid: $uid, name: $name, age: $age})", 102 | parameters={"uid": "553ac2c9-7b2d-404e-8271-40426ae80de0", "name": "John Doe", "age": 25}, 103 | ) 104 | await client.cypher( 105 | query="CREATE (c:Coffee {flavour: $flavour, milk: $milk, sugar: $sugar})", 106 | parameters={"flavour": "Espresso", "milk": False, "sugar": False}, 107 | ) 108 | 109 | ## Model queries also can be batched together without any extra work! 110 | coffee = await Coffee(flavour="Americano", milk=False, sugar=False).create() 111 | ``` 112 | 113 | You can batch anything that runs a query, be that a model method, a custom query or a relationship-property method. If any of the queries fail, the whole transaction will be rolled back and an exception will be raised. 114 | 115 | ### Using bookmarks (Enterprise Edition only) 116 | 117 | If you are using the Enterprise Edition of Neo4j, you can use bookmarks to keep track of the last transaction that has been committed. The client provides a `last_bookmarks` property that allows you to get the bookmarks from the last session. 
These bookmarks can be used in combination with the `use_bookmarks()` method. Like the `batch()` method, the `use_bookmarks()` method has to be called with a context manager. All queries run inside the context manager will use the bookmarks passed to the `use_bookmarks()` method. Here is an example of how to use bookmarks: 118 | 119 | ```python 120 | ## Create a new node and get the bookmarks from the last session 121 | await client.cypher("CREATE (d:Developer {name: 'John Doe', age: 25})") 122 | bookmarks = client.last_bookmarks 123 | 124 | ## Create another node, but this time don't get the bookmark 125 | ## When we use the bookmarks from the last session, this node will not be visible 126 | await client.cypher("CREATE (c:Coffee {flavour: 'Espresso', milk: False, sugar: False})") 127 | 128 | with client.use_bookmarks(bookmarks=bookmarks): 129 | ## All queries executed inside the context manager will use the bookmarks 130 | ## passed to the `use_bookmarks()` method. 131 | 132 | ## Here we will only see the node created in the first query 133 | results, meta = await client.cypher("MATCH (n) RETURN n") 134 | 135 | ## Model queries also can be batched together without any extra work! 136 | ## This will return no results, since the coffee node was created after 137 | ## the bookmarks were taken. 138 | coffee = await Coffee.find_many() 139 | print(coffee) ## [] 140 | ``` 141 | 142 | ### Manual indexing and constraints 143 | 144 | Most of the time, the creation of indexes/constraints will be handled by the models themselves. But it can still be handy to have a simple way of creating new ones. This is where the `create_lookup_index()`, `create_range_index`, `create_text_index`, `create_point_index` and `create_uniqueness_constraint()` methods come in. 145 | 146 | First, let's take a look at how to create a custom index in the database. The `create_range_index`, `create_text_index` and `create_point_index` methods take a few arguments: 147 | 148 | - `name`: The name of the index to create (Make sure this is unique!). 149 | - `entity_type`: The entity type the index is created for. Can be either **EntityType.NODE** or **EntityType.RELATIONSHIP**. 150 | - `properties`: A list of properties to create the index for. 151 | - `labels_or_type`: The node labels or relationship type the index is created for. 152 | 153 | The `create_lookup_index()` takes the same arguments, except for the `labels_or_type` and `properties` arguments. 154 | 155 | The `create_uniqueness_constraint()` method also takes similar arguments. 156 | 157 | - `name`: The name of the constraint to create. 158 | - `entity_type`: The entity type the constraint is created for. Can be either **EntityType.NODE** or **EntityType.RELATIONSHIP**. 159 | - `properties`: A list of properties to create the constraint for. 160 | - `labels_or_type`: The node labels or relationship type the constraint is created for. 
161 | 162 | Here is an example of how to use the methods: 163 | 164 | ```python 165 | ## Creates a `RANGE` index for a `Coffee's` `sugar` and `flavour` properties 166 | await client.create_range_index("hot_beverage_index", EntityType.NODE, ["sugar", "flavour"], ["Beverage", "Hot"]) 167 | 168 | ## Creates a UNIQUENESS constraint for a `Developer's` `uid` property 169 | await client.create_uniqueness_constraint("developer_constraint", EntityType.NODE, ["uid"], ["Developer"]) 170 | ``` 171 | 172 | ### Client utilities 173 | 174 | The client also provides some additional utility methods, which mostly exist for convenience when writing tests or setting up environments: 175 | 176 | - `is_connected()`: Returns whether the client is currently connected to a database. 177 | - `drop_nodes()`: Drops all nodes from the database. 178 | - `drop_constraints()`: Drops all constraints from the database. 179 | - `drop_indexes()`: Drops all indexes from the database. 180 | -------------------------------------------------------------------------------- /docs/Logging.md: -------------------------------------------------------------------------------- 1 | ## Logging 2 | 3 | You can control the log level and whether to log to the console or not by setting the `PYNEO4J_OGM_LOG_LEVEL` and `PYNEO4J_OGM_ENABLE_LOGGING` environment variables. The available levels are the same as provided by the built-in `logging` module. The default log level is `WARNING` and logging to the console is enabled by default. 4 | -------------------------------------------------------------------------------- /docs/Migrations.md: -------------------------------------------------------------------------------- 1 | ## Migrations 2 | 3 | As of version [`v0.5.0`](https://github.com/groc-prog/pyneo4j-ogm/blob/develop/CHANGELOG.md#whats-changed-in-v050-2024-02-06), pyneo4j-ogm supports migrations using a built-in migration tool. The migration tool is basic but flexible and should cover most use cases. 4 | 5 | ### Initializing migrations for your project 6 | 7 | To initialize migrations for your project, you can use the `poetry run pyneo4j_ogm init` command. This will create a `migrations` directory at the given path (which defaults to `./migrations`), which will contain all your migration files. 8 | 9 | ```bash 10 | poetry run pyneo4j_ogm init --migration-dir ./my/custom/migration/path 11 | ``` 12 | 13 | ### Creating a new migration 14 | 15 | To create a new migration, you can use the `poetry run pyneo4j_ogm create` command. This will create a new migration file inside the `migrations` directory. The migration file will contain an `up` and `down` function, which you can use to define your migration. 16 | 17 | ```bash 18 | poetry run pyneo4j_ogm create my_first_migration 19 | ``` 20 | 21 | Both the `up` and `down` functions will receive the client used during the migration as their only argument. This makes the migrations pretty flexible, since you can not only use the client to execute queries, but also register models on it and use them to execute methods. 22 | 23 | > **Note**: When using models inside the migration, you have to make sure that the model used implements the same data structure as the data inside the graph. Otherwise you might run into validation issues. 24 | 25 | ```python 26 | """ 27 | Auto-generated migration file {name}. Do not 28 | rename this file or the `up` and `down` functions.
29 | """ 30 | from pyneo4j_ogm import Pyneo4jClient 31 | 32 | 33 | async def up(client: Pyneo4jClient) -> None: 34 | """ 35 | Write your `UP migration` here. 36 | """ 37 | await client.cypher("CREATE (n:Node {name: 'John'})") 38 | 39 | 40 | async def down(client: Pyneo4jClient) -> None: 41 | """ 42 | Write your `DOWN migration` here. 43 | """ 44 | await client.cypher("MATCH (n:Node {name: 'John'}) DELETE n") 45 | ``` 46 | 47 | ### Running migrations 48 | 49 | To run the migrations, you can use the `up` or `down` commands. The `up` command will run all migrations that have not been run yet, while the `down` command will run all migrations in reverse order. 50 | 51 | Both commands support a `--up-count` or `--down-count` argument, which can be used to limit the number of migrations to run. By default, the `up` command will run `all pending migration` and the `down` command will roll back the `last migration`. 52 | 53 | ```bash 54 | poetry run pyneo4j_ogm up --up-count 3 55 | poetry run pyneo4j_ogm down --down-count 2 56 | ``` 57 | 58 | ### Listing migrations 59 | 60 | The current state of all migrations can be viewed anytime using the `status` command. This will show you all migrations that have been run and all migrations that are pending. 61 | 62 | ```bash 63 | poetry run pyneo4j_ogm status 64 | 65 | ## Output 66 | ┌─────────────────────────────────────────┬─────────────────────┐ 67 | │ Migration │ Applied At │ 68 | ├─────────────────────────────────────────┼─────────────────────┤ 69 | │ 20160608155948-my_awesome_migration │ 2022-03-04 15:40:22 │ 70 | │ 20160608155948-my_fixed_migration │ 2022-03-04 15:41:13 │ 71 | │ 20160608155948-final_fix_i_swear │ PENDING │ 72 | └─────────────────────────────────────────┴─────────────────────┘ 73 | ``` 74 | 75 | ### Programmatic usage 76 | 77 | The migration tool can also be used programmatically. This can be useful if you want to run migrations inside your application or if you want to integrate the migration tool into your own CLI. 78 | 79 | ```python 80 | import asyncio 81 | from pyneo4j_ogm.migrations import create, down, init, status, up 82 | 83 | ## Call with same arguments as you would with cli 84 | init(migration_dir="./my/custom/migration/path") 85 | 86 | create("my_first_migration") 87 | asyncio.run(up()) 88 | ``` 89 | -------------------------------------------------------------------------------- /docs/Query.md: -------------------------------------------------------------------------------- 1 | 2 | ## Queries 3 | 4 | As you might have seen by now, `pyneo4j-ogm` provides a variate of methods to query the graph. If you followed the documentation up until this point, you might have seen that most of the methods take a `filters` argument. 5 | 6 | If you have some `prior experience` with `Neo4j and Cypher`, you may know that it does not provide a easy way to generate queries from given inputs. This is where `pyneo4j-ogm` comes in. It provides a `variety of filters` to make querying the graph as easy as possible. 7 | 8 | The filters are heavily inspired by [`MongoDB's query language`](https://docs.mongodb.com/manual/tutorial/query-documents/), so if you have some experience with that, you will feel right at home. 9 | 10 | This is really nice to have, not only for normal usage, but especially if you are developing a `gRPC service` or `REST API` and want to provide a way to query the graph from the outside. 11 | 12 | But enough of that, let's take a look at the different filters available to you. 
13 | 14 | ### Filtering queries 15 | 16 | Since the filters are inspired by MongoDB's query language, they are also very similar. The filters are defined as dictionaries, where the keys are the properties you want to filter on and the values are the values you want to filter for. 17 | 18 | We can roughly separate them into the `following categories`: 19 | 20 | - Comparison operators 21 | - String operators 22 | - List operators 23 | - Logical operators 24 | - Element operators 25 | 26 | #### Comparison operators 27 | 28 | Comparison operators are used to compare values to each other. They are the most basic type of filter. 29 | 30 | | Operator | Description | Corresponding Cypher query | 31 | | --- | --- | --- | 32 | | `$eq` | Matches values that are equal to a specified value. | `WHERE node.property = value` | 33 | | `$neq` | Matches all values that are not equal to a specified value. | `WHERE node.property <> value` | 34 | | `$gt` | Matches values that are greater than a specified value. | `WHERE node.property > value` | 35 | | `$gte` | Matches values that are greater than or equal to a specified value. | `WHERE node.property >= value` | 36 | | `$lt` | Matches values that are less than a specified value. | `WHERE node.property < value` | 37 | | `$lte` | Matches values that are less than or equal to a specified value. | `WHERE node.property <= value` | 38 | 39 | #### String operators 40 | 41 | String operators are used to compare string values to each other. 42 | 43 | | Operator | Description | Corresponding Cypher query | 44 | | --- | --- | --- | 45 | | `$contains` | Matches values that contain a specified value. | `WHERE node.property CONTAINS value` | 46 | | `$icontains` | Matches values that contain a specified case insensitive value. | `WHERE toLower(node.property) CONTAINS toLower(value)` | 47 | | `$startsWith` | Matches values that start with a specified value. | `WHERE node.property STARTS WITH value` | 48 | | `$istartsWith` | Matches values that start with a specified case insensitive value. | `WHERE toLower(node.property) STARTS WITH toLower(value)` | 49 | | `$endsWith` | Matches values that end with a specified value. | `WHERE node.property ENDS WITH value` | 50 | | `$iendsWith` | Matches values that end with a specified case insensitive value. | `WHERE toLower(node.property) ENDS WITH toLower(value)` | 51 | | `$regex` | Matches values that match a specified regular expression (Regular expressions used by Neo4j and Cypher). | `WHERE node.property =~ value` | 52 | 53 | #### List operators 54 | 55 | List operators are used to compare list values to each other. 56 | 57 | | Operator | Description | Corresponding Cypher query | 58 | | --- | --- | --- | 59 | | `$in` | Matches lists where at least one item is in the given list. | `WHERE ANY(i IN node.property WHERE i IN value)` | 60 | | `$nin` | Matches lists where no items are in the given list | `WHERE NONE(i IN node.property WHERE i IN value)` | 61 | | `$all` | Matches lists where all items are in the given list. | `WHERE ALL(i IN node.property WHERE i IN value)` | 62 | | `$size` | Matches lists where the size of the list is equal to the given value. | `WHERE size(node.property) = value` | 63 | 64 | > **Note**: The `$size` operator can also be combined with the comparison operators by nesting them inside the `$size` operator. For example: `{"$size": {"$gt": 5}}`. 65 | 66 | #### Logical operators 67 | 68 | Logical operators are used to combine multiple filters with each other. 
69 | 70 | | Operator | Description | Corresponding Cypher query | 71 | | --- | --- | --- | 72 | | `$and` | Joins query clauses with a logical AND and returns all nodes that match the conditions of both clauses (Used by default if multiple filters are present). | `WHERE node.property1 = value1 AND node.property2 = value2` | 73 | | `$or` | Joins query clauses with a logical OR and returns all nodes that match the conditions of either clause. | `WHERE node.property1 = value1 OR node.property2 = value2` | 74 | | `$xor` | Joins query clauses with a logical XOR and returns all nodes that match the conditions of either clause but not both. | `WHERE node.property1 = value1 XOR node.property2 = value2` | 75 | | `$not` | Inverts the effect of a query expression nested within and returns nodes that do not match the query expression. | `WHERE NOT (node.property = value)` | 76 | 77 | #### Element operators 78 | 79 | Element operators are a special kind of operator not available for every filter type. They are used to check Neo4j-specific values. 80 | 81 | | Operator | Description | Corresponding Cypher query | 82 | | --- | --- | --- | 83 | | `$exists` | Matches nodes that have the specified property. | `WHERE EXISTS(node.property)` | 84 | | `$elementId` | Matches nodes that have the specified element id. | `WHERE elementId(node) = value` | 85 | | `$id` | Matches nodes that have the specified id. | `WHERE id(node) = value` | 86 | | `$labels` | Matches nodes that have the specified labels. | `WHERE ALL(i IN labels(n) WHERE i IN value)` | 87 | | `$type` | Matches relationships that have the specified type. Can be either a list or a string. | For a string: `WHERE type(r) = value`, For a list: `WHERE type(r) IN value` | 88 | 89 | #### Pattern matching 90 | 91 | The filters we have seen so far are great for simple queries, but what if we need to filter our nodes based on relationships to other nodes? This is where `pattern matching` comes in. Pattern matching allows us to define a `pattern` of nodes and relationships we want to match (or ignore). This is done by defining a `list of patterns` inside the `$patterns` key of the filter. Here is a short summary of the available operators inside a pattern: 92 | 93 | - `$node`: Filters applied to the target node. Expects a dictionary containing basic filters. 94 | - `$relationship`: Filters applied to the relationship between the source node and the target node. Expects a dictionary containing basic filters. 95 | - `$direction`: The direction of the pattern. Can be either INCOMING, OUTGOING or BOTH. 96 | - `$exists`: A boolean value indicating whether the pattern must exist or not. 97 | 98 | > **Note**: The `$patterns` key can only be used inside the `root filter` and not inside nested filters. Furthermore, only patterns across a single hop are supported. 99 | 100 | To make this as easy to understand as possible, we are going to take a look at a quick example. Let's say our `Developer` can define relationships to his `Coffee`. We want to get all `Developers` who `don't drink` their coffee `with sugar`: 101 | 102 | ```python 103 | developers = await Developer.find_many({ 104 | "$patterns": [ 105 | { 106 | ## The `$exists` operator tells the library to match/ignore the pattern 107 | "$exists": False, 108 | ## This defines the direction of the relationship inside the pattern 109 | "$direction": RelationshipMatchDirection.OUTGOING, 110 | ## The `$node` key is used to define the node we want to filter for.
This means 111 | ## the filters inside the `$node` key will be applied to our `Coffee` nodes 112 | "$node": { 113 | "$labels": ["Beverage", "Hot"], 114 | "sugar": False 115 | }, 116 | ## The `$relationship` key is used to filter the relationship between the two nodes 117 | ## It can also define property filters for the relationship 118 | "$relationship": { 119 | "$type": "CHUGGED" 120 | } 121 | } 122 | ] 123 | }) 124 | ``` 125 | 126 | We can take this even further by defining multiple patters inside the `$patterns` key. Let's say this time our `Developer` can have some other `Developer` friends and we want to get all `Developers` who liked their coffee. At the same time, our developer must be `FRIENDS_WITH` (now the relationship is an incoming one, because why not?) a developer named `Jenny`: 127 | 128 | ```python 129 | developers = await Developer.find_many({ 130 | "$patterns": [ 131 | { 132 | "$exists": True, 133 | "$direction": RelationshipMatchDirection.OUTGOING, 134 | "$node": { 135 | "$labels": ["Beverage", "Hot"], 136 | }, 137 | "$relationship": { 138 | "$type": "CHUGGED", 139 | "liked": True 140 | } 141 | }, 142 | { 143 | "$exists": True, 144 | "$direction": RelationshipMatchDirection.INCOMING, 145 | "$node": { 146 | "$labels": ["Developer"], 147 | "name": "Jenny" 148 | }, 149 | "$relationship": { 150 | "$type": "FRIENDS_WITH" 151 | } 152 | } 153 | ] 154 | }) 155 | ``` 156 | 157 | #### Multi-hop filters 158 | 159 | Multi-hop filters are a special type of filter which is only available for [`NodeModelInstance.find_connected_nodes()`](https://github.com/groc-prog/pyneo4j-ogm/blob/develop/docs/Models.md#nodemodelsettingsfind_connected_nodes()). They allow you to specify filter parameters on the target node and all relationships between them over, you guessed it, multiple hops. To define this filter, you have a few operators you can define: 160 | 161 | - `$node`: Filters applied to the target node. Expects a dictionary containing basic filters. Can not contain pattern yet. 162 | - `$minHops`: The minimum number of hops between the source node and the target node. Must be greater than 0. 163 | - `$maxHops`: The maximum number of hops between the source node and the target node. You can pass "\*" as a value to define no upper limit. Must be greater than 1. 164 | - `$relationships`: A list of relationship filters. Each filter is a dictionary containing basic filters and must define a $type operator. 165 | 166 | ```python 167 | ## Picture a structure like this inside the graph: 168 | ## (:Producer)-[:SELLS_TO]->(:Barista)-[:PRODUCES {with_love: bool}]->(:Coffee)-[:CONSUMED_BY]->(:Developer) 169 | 170 | ## If we want to get all `Developer` nodes connected to a `Producer` node over the `Barista` and `Coffee` nodes, 171 | ## where the `Barista` created the coffee with love. 172 | 173 | ## Let's say, for the sake of this example, that there are connections possible 174 | ## with 10+ hops, but we don't want to include them. To solve this, we can define 175 | ## a `$maxHops` filter with a value of `10`. 
176 | producer = await Producer.find_one({"name": "Coffee Inc."}) 177 | 178 | if producer is None: 179 | ## No producer found, do something else 180 | 181 | developers = await producer.find_connected_nodes({ 182 | "$maxHops": 10, 183 | "$node": { 184 | "$labels": ["Developer", "Python"], 185 | ## You can use all available filters here as well 186 | }, 187 | ## You can define filters on specific relationships inside the path 188 | "$relationships": [ 189 | { 190 | ## Here we define a filter for all `PRODUCES` relationships 191 | ## Only nodes where the with_love property is set to `True` will be returned 192 | "$type": "PRODUCES", 193 | "with_love": True 194 | } 195 | ] 196 | }) 197 | 198 | print(developers) ## [, , ...] 199 | 200 | ## Or if no matches were found 201 | print(developers) ## [] 202 | ``` 203 | 204 | ### Projections 205 | 206 | Projections are used to only return specific parts of the models as dictionaries. They are defined as a dictionary where the key is the name of the property in the returned dictionary and the value is the name of the property on the model instance. 207 | 208 | Projections can help you to reduce bandwidth usage and speed up queries, since you only return the data you actually need. 209 | 210 | > **Note:** Only top-level mapping is supported. This means that you can not map properties to a nested dictionary key. 211 | 212 | In the following example, we will return a dictionary with a `dev_name` key, which gets mapped to the model's `name` property and a `dev_age` key, which gets mapped to the model's `age` property. Any defined mapping which does not exist on the model will have `None` as its value. You can also map the result's `elementId` and `Id` using either `$elementId` or `$id` as the value for the mapped key. 213 | 214 | ```python 215 | developer = await Developer.find_one({"name": "John"}, {"dev_name": "name", "dev_age": "age", "i_do_not_exist": "some_non_existing_property"}) 216 | 217 | print(developer) ## {"dev_name": "John", "dev_age": 24, "i_do_not_exist": None} 218 | ``` 219 | 220 | ### Query options 221 | 222 | Query options are used to define how results are returned from the query. They provide some basic functionality for easily implementing pagination, sorting, etc. They are defined as a dictionary where the key is the name of the option and the value is the value of the option. The following options are available: 223 | 224 | - `limit`: Limits the number of returned results. 225 | - `skip`: Skips the first `n` results. 226 | - `sort`: Sorts the results by the given property. Can be either a string or a list of strings. If a list is provided, the results will be sorted by the first property and then by the second property, etc. 227 | - `order`: Defines the sort direction. Can be either `ASC` or `DESC`. Defaults to `ASC`. 228 | 229 | ```python 230 | ## Returns 50 results, skips the first 10 and sorts them by the `name` property in descending order 231 | developers = await Developer.find_many({}, options={"limit": 50, "skip": 10, "sort": "name", "order": QueryOptionsOrder.DESCENDING}) 232 | 233 | print(len(developers)) ## 50 234 | print(developers) ## [, , ...] 235 | ``` 236 | 237 | ### Auto-fetching relationship-properties 238 | 239 | You have the option to automatically fetch all defined relationship-properties of matched nodes. This will populate the `instance..nodes` attribute with the fetched nodes. This can be useful in situations where you need to fetch a specific node and get all of its related nodes at the same time.
240 | 241 | > **Note**: Auto-fetching nodes with many relationships can be very expensive and slow down your queries. Use it with caution. 242 | 243 | To enable this behavior, you can either set the `auto_fetch_nodes` parameter to `True` or set the `auto_fetch_nodes` setting in the model settings to `True`, but doing so will `always enable auto-fetching`. 244 | 245 | You can also define which relationship-properties to fetch by providing the fetched models to the `auto_fetch_models` parameter. This can be useful if you only want to fetch specific relationship-properties. 246 | 247 | Now, let's take a look at an example: 248 | 249 | ```python 250 | ## Fetches everything defined in the relationship-properties of the current matched node 251 | developer = await Developer.find_one({"name": "John"}, auto_fetch_nodes=True) 252 | 253 | ## All nodes for all defined relationship-properties are now fetched 254 | print(developer.coffee.nodes) ## [, , ...] 255 | print(developer.developer.nodes) ## [, , ...] 256 | print(developer.other_property.nodes) ## [, , ...] 257 | ``` 258 | 259 | With the `auto_fetch_models` parameter, we can define which relationship-properties to fetch: 260 | 261 | ```python 262 | ## Only fetch nodes for `Coffee` and `Developer` models defined in relationship-properties 263 | ## The models can also be passed as strings, where the string is the model's name 264 | developer = await Developer.find_one({"name": "John"}, auto_fetch_nodes=True, auto_fetch_models=[Coffee, "Developer"]) 265 | 266 | ## Only the defined models have been fetched 267 | print(developer.coffee.nodes) ## [, , ...] 268 | print(developer.developer.nodes) ## [, , ...] 269 | print(developer.other_property.nodes) ## [] 270 | ``` 271 | -------------------------------------------------------------------------------- /docs/RelationshipProperty.md: -------------------------------------------------------------------------------- 1 | ## Relationship-properties 2 | 3 | > **Note**: Relationship-properties are only available for classes which inherit from the `NodeModel` class. 4 | 5 | Relationship-properties are a special type of property that can only be defined on a `NodeModel` class. They can be used to define relationships between nodes and other models. They provide a variety of options to fine-tune the relationship and how it behaves.
The options are pretty self-explanatory, but let's go through them anyway:
6 | 
7 | ```python
8 | class Developer(NodeModel):
9 | 
10 |     ## Here we define a relationship to one or more `Coffee` nodes, both the target
11 |     ## and relationship-model can be defined as strings (Has to be the exact name of the model)
12 | 
13 |     ## Notice that the `RelationshipProperty` class takes two type arguments, the first
14 |     ## one being the target model and the second one being the relationship-model
15 |     ## You can get away without defining these, but it is recommended to do so for
16 |     ## better type hinting
17 |     coffee: RelationshipProperty["Coffee", "Consumed"] = RelationshipProperty(
18 |         ## The target model is the model we want to connect to
19 |         target_model="Coffee",
20 |         ## The relationship-model is the model which defines the relationship
21 |         ## between a target model (in this case `Coffee`) and the model it is defined on
22 |         relationship_model=Consumed,
23 |         ## The direction of the relationship inside the graph
24 |         direction=RelationshipPropertyDirection.OUTGOING,
25 |         ## Cardinality defines how many nodes can be connected to the relationship
26 |         ## **Note**: This only softly enforces cardinality from the model it's defined on
27 |         ## and does not enforce it on the database level
28 |         cardinality=RelationshipPropertyCardinality.ZERO_OR_MORE,
29 |         ## Whether to allow multiple connections to the same node
30 |         allow_multiple=True,
31 |     )
32 | ```
33 | 
34 | ### Available methods
35 | 
36 | Just like regular models, relationship-properties also provide a few methods to make working with them easier. In this section we are going to take a closer look at the different methods available to you.
37 | 
38 | > **Note**: In the following, the terms `source node` and `target node` will be used. Source node refers to the `node instance the method is called on` and target node refers to the `node/s passed to the method`.
39 | 
40 | #### RelationshipProperty.relationships()
41 | 
42 | Returns the relationships between the source node and the target node. The method expects a single argument `node` which has to be the target node of the relationship. This always returns a list of relationship instances or an empty list if no relationships were found.
43 | 
44 | ```python
45 | ## The `developer` and `coffee` variables have been defined somewhere above
46 | 
47 | ## Returns the relationships between the two nodes
48 | coffee_relationships = await developer.coffee.relationships(coffee)
49 | 
50 | print(coffee_relationships) ## [<Consumed>, <Consumed>, ...]
51 | 
52 | ## Or if no relationships were found
53 | print(coffee_relationships) ## []
54 | ```
55 | 
56 | ##### Filters
57 | 
58 | This method also allows for (optional) filters. For more about filters, see the [`Filtering queries`](https://github.com/groc-prog/pyneo4j-ogm/blob/develop/docs/Query.md#filtering-queries) section.
59 | 
60 | ```python
61 | ## Only returns the relationships between the two nodes where
62 | ## the developer liked the coffee
63 | coffee_relationships = await developer.coffee.relationships(coffee, {"likes_it": True})
64 | 
65 | print(coffee_relationships) ## [<Consumed>, <Consumed>, ...]
66 | ```
67 | 
68 | ##### Projections
69 | 
70 | `Projections` can be used to only return specific parts of the models as dictionaries. For more information about projections, see the [`Projections`](https://github.com/groc-prog/pyneo4j-ogm/blob/develop/docs/Query.md#projections) section.
71 | 
72 | ```python
73 | ## Returns dictionaries with the relationship's `liked` property at the
74 | ## `loved_it` key instead of model instances
75 | coffee_relationships = await developer.coffee.relationships(coffee, projections={"loved_it": "liked"})
76 | 
77 | print(coffee_relationships) ## [{"loved_it": True}, {"loved_it": False}, ...]
78 | ```
79 | 
80 | ##### Query options
81 | 
82 | `Query options` can be used to define how results are returned from the query. They are provided via the `options` argument. For more about query options, see the [`Query options`](https://github.com/groc-prog/pyneo4j-ogm/blob/develop/docs/Query.md#query-options) section.
83 | 
84 | ```python
85 | ## Skips the first 10 results and returns the next 20
86 | coffee_relationships = await developer.coffee.relationships(coffee, options={"limit": 20, "skip": 10})
87 | 
88 | print(coffee_relationships) ## [<Consumed>, <Consumed>, ...] up to 20 results
89 | ```
90 | 
91 | #### RelationshipProperty.connect()
92 | 
93 | Connects the given target node to the source node. The method expects the target node as the first argument, and optional properties as the second argument. The properties provided will be carried over to the relationship inside the graph.
94 | 
95 | Depending on the `allow_multiple` option, which is defined on the relationship-property, this method will either create a new relationship or update the existing one. If the `allow_multiple` option is set to `True`, this method will always create a new relationship. Otherwise, the query will use a `MERGE` statement to update an existing relationship.
96 | 
97 | ```python
98 | ## The `developer` and `coffee` variables have been defined somewhere above
99 | 
100 | coffee_relationship = await developer.coffee.connect(coffee, {"likes_it": True})
101 | 
102 | print(coffee_relationship) ## <Consumed>
103 | ```
104 | 
105 | #### RelationshipProperty.disconnect()
106 | 
107 | Disconnects the target node from the source node and deletes all relationships between them. The only argument to the method is the target node. If no relationships exist between the two, nothing is deleted and `0` is returned. Otherwise, the number of deleted relationships is returned.
108 | 
109 | > **Note**: If `allow_multiple` was set to `True` and multiple relationships to the target node exist, all of them will be deleted.
110 | 
111 | ```python
112 | ## The `developer` and `coffee` variables have been defined somewhere above
113 | 
114 | coffee_relationship_count = await developer.coffee.disconnect(coffee)
115 | 
116 | print(coffee_relationship_count) ## However many relationships were deleted
117 | ```
118 | 
119 | ##### Raise on empty result
120 | 
121 | By default, the `disconnect()` method will return `0` if no relationships were found. If you want to raise an exception instead, you can pass `raise_on_empty=True` to the method.
122 | 
123 | ```python
124 | ## Raises a `NoResultFound` exception if no results were matched
125 | coffee_relationship_count = await developer.coffee.disconnect(coffee, raise_on_empty=True)
126 | ```
127 | 
128 | #### RelationshipProperty.disconnect_all()
129 | 
130 | Disconnects all target nodes from the source node and deletes all relationships between them. Returns the number of deleted relationships.
131 | 
132 | ```python
133 | ## This will delete all relationships to `Coffee` nodes for this `Developer` node
134 | coffee_relationship_count = await developer.coffee.disconnect_all()
135 | 
136 | print(coffee_relationship_count) ## However many relationships were deleted
137 | ```
138 | 
139 | #### RelationshipProperty.replace()
140 | 
141 | Disconnects all relationships from the source node to the old target node and connects them back to the new target node, carrying over all properties defined in the relationship. Returns the replaced relationships.
142 | 
143 | > **Note**: If `multiple relationships` between the source node and the old target node exist, `all of them` will be replaced.
144 | 
145 | ```python
146 | ## Currently there are two relationships defined between the `developer` and `coffee_latte`
147 | ## nodes where the `likes_it` property is set to `True` and `False` respectively
148 | 
149 | ## Moves the relationships from `coffee_latte` to `coffee_americano`
150 | replaced_coffee_relationships = await developer.coffee.replace(coffee_latte, coffee_americano)
151 | 
152 | print(replaced_coffee_relationships) ## [<Consumed>, <Consumed>]
153 | ```
154 | 
155 | #### RelationshipProperty.find_connected_nodes()
156 | 
157 | Finds and returns all connected nodes for the given relationship-property. This method always returns a list of instances/dictionaries or an empty list if no results were found.
158 | 
159 | ```python
160 | ## Returns all `Coffee` nodes
161 | coffees = await developer.coffee.find_connected_nodes()
162 | 
163 | print(coffees) ## [<Coffee>, <Coffee>, ...]
164 | 
165 | ## Or if no matches were found
166 | print(coffees) ## []
167 | ```
168 | 
169 | ##### Filters
170 | 
171 | You can pass filters using the `filters` argument to filter the returned nodes. For more about filters, see the [`Filtering queries`](https://github.com/groc-prog/pyneo4j-ogm/blob/develop/docs/Query.md#filtering-queries) section.
172 | 
173 | ```python
174 | ## Returns all `Coffee` nodes where the `sugar` property is set to `True`
175 | coffees = await developer.coffee.find_connected_nodes({"sugar": True})
176 | 
177 | print(coffees) ## [<Coffee>, <Coffee>, ...]
178 | ```
179 | 
180 | ##### Projections
181 | 
182 | `Projections` can be used to only return specific parts of the models as dictionaries. For more information about projections, see the [`Projections`](https://github.com/groc-prog/pyneo4j-ogm/blob/develop/docs/Query.md#projections) section.
183 | 
184 | ```python
185 | ## Returns dictionaries with the coffee's `sugar` property at the `contains_sugar` key instead
186 | ## of model instances
187 | coffees = await developer.coffee.find_connected_nodes({"sugar": True}, {"contains_sugar": "sugar"})
188 | 
189 | print(coffees) ## [{"contains_sugar": True}, {"contains_sugar": False}, ...]
190 | ```
191 | 
192 | ##### Query options
193 | 
194 | `Query options` can be used to define how results are returned from the query. They are provided via the `options` argument. For more about query options, see the [`Query options`](https://github.com/groc-prog/pyneo4j-ogm/blob/develop/docs/Query.md#query-options) section.
195 | 
196 | ```python
197 | ## Skips the first 10 results and returns the next 20
198 | coffees = await developer.coffee.find_connected_nodes({"sugar": True}, options={"limit": 20, "skip": 10})
199 | 
200 | ## Up to 20 results are returned
201 | print(coffees) ## [<Coffee>, <Coffee>, ...]
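
## The same `options` dict also accepts the `sort` and `order` keys described in the
## `Query options` section. A minimal sketch, assuming the `Coffee` model has a `flavour`
## property and that `QueryOptionsOrder` has been imported from `pyneo4j_ogm`:
coffees = await developer.coffee.find_connected_nodes(
    {"sugar": True},
    options={"sort": "flavour", "order": QueryOptionsOrder.DESCENDING},
)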
202 | ```
203 | 
204 | ##### Auto-fetching nodes
205 | 
206 | The `auto_fetch_nodes` and `auto_fetch_models` parameters can be used to automatically fetch all or selected nodes from defined relationship-properties when running the `find_connected_nodes()` query. For more about auto-fetching, see [`Auto-fetching relationship-properties`](https://github.com/groc-prog/pyneo4j-ogm/blob/develop/docs/Query.md#auto-fetching-relationship-properties).
207 | 
208 | ```python
209 | ## Returns coffee instances with `instance.<relationship-property>.nodes` properties already fetched
210 | coffees = await developer.coffee.find_connected_nodes(auto_fetch_nodes=True)
211 | 
212 | print(coffees[0].developer.nodes) ## [<Developer>, <Developer>, ...]
213 | print(coffees[0].other_property.nodes) ## [<OtherModel>, <OtherModel>, ...]
214 | 
215 | ## Returns coffee instances with only the `instance.developer.nodes` property already fetched
216 | coffees = await developer.coffee.find_connected_nodes(auto_fetch_nodes=True, auto_fetch_models=[Developer])
217 | 
218 | ## Auto-fetch models can also be passed as strings
219 | coffees = await developer.coffee.find_connected_nodes(auto_fetch_nodes=True, auto_fetch_models=["Developer"])
220 | 
221 | print(coffees[0].developer.nodes) ## [<Developer>, <Developer>, ...]
222 | print(coffees[0].other_property.nodes) ## []
223 | ```
224 | 
225 | ### Hooks with relationship properties
226 | 
227 | Although slightly different, hooks can also be registered for relationship-properties. The only difference lies in the arguments passed to the hook function. Since relationship-properties are defined on a `NodeModel` class, the hook function will receive the `NodeModel class context` of the model it has been called on as the first argument instead of the `RelationshipProperty class context` (like it would for regular models).
228 | 
229 | > **Note:** The rest of the arguments passed to the hook function are the same as for regular models.
230 | 
231 | ```python
232 | class Developer(NodeModel):
233 | 
234 |     ## Here we define a relationship to one or more `Coffee` nodes, both the target
235 |     ## and relationship-model can be defined as strings (Has to be the exact name of the model)
236 | 
237 |     ## Notice that the `RelationshipProperty` class takes two type arguments, the first
238 |     ## one being the target model and the second one being the relationship-model
239 |     ## You can get away without defining these, but it is recommended to do so for
240 |     ## better type hinting
241 |     coffee: RelationshipProperty["Coffee", "Consumed"] = RelationshipProperty(
242 |         ## The target model is the model we want to connect to
243 |         target_model="Coffee",
244 |         ## The relationship-model is the model which defines the relationship
245 |         ## between a target model (in this case `Coffee`) and the model it is defined on
246 |         relationship_model=Consumed,
247 |         ## The direction of the relationship inside the graph
248 |         direction=RelationshipPropertyDirection.OUTGOING,
249 |         ## Cardinality defines how many nodes can be connected to the relationship
250 |         ## **Note**: This only softly enforces cardinality from the model it's defined on
251 |         ## and does not enforce it on the database level
252 |         cardinality=RelationshipPropertyCardinality.ZERO_OR_MORE,
253 |         ## Whether to allow multiple connections to the same node
254 |         allow_multiple=True,
255 |     )
256 | 
257 |     class Settings:
258 |         post_hooks = {
259 |             "coffee.connect": lambda self, *args, **kwargs: print(type(self))
260 |         }
261 | 
262 | ## Somewhere further down the line...
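## The "coffee.connect" key registers the hook for the `connect()` method of the `coffee`
## relationship-property, so the lambda above runs after the call below resolves and, as
## described earlier, receives the `Developer` instance the property is defined on as `self`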
263 | ## Prints `<class 'Developer'>` instead of `<class 'RelationshipProperty'>`
264 | await developer.coffee.connect(coffee)
265 | ```
266 | 
267 | The reason for this change in the hooks behavior is simple, really. Since relationship-properties are only used to define relationships between nodes, it makes more sense to have the `NodeModel class context` available inside the hook function instead of the `RelationshipProperty class context`, since the hook function will most likely be used to execute code on the model the relationship-property is defined on.
268 | 
-------------------------------------------------------------------------------- /pyneo4j_ogm/__init__.py: --------------------------------------------------------------------------------
1 | # pylint: disable=missing-module-docstring
2 | 
3 | from .core.client import EntityType, Pyneo4jClient
4 | from .core.node import NodeModel
5 | from .core.relationship import RelationshipModel
6 | from .fields.property_options import WithOptions
7 | from .fields.relationship_property import (
8 |     RelationshipProperty,
9 |     RelationshipPropertyCardinality,
10 |     RelationshipPropertyDirection,
11 | )
12 | from .queries.types import QueryOptionsOrder
13 | 
-------------------------------------------------------------------------------- /pyneo4j_ogm/exceptions.py: --------------------------------------------------------------------------------
1 | """
2 | Exceptions module for Pyneo4j OGM.
3 | """
4 | 
5 | from typing import Any, List
6 | 
7 | 
8 | class Pyneo4jException(Exception):
9 |     """
10 |     Base exception for all Pyneo4j exceptions.
11 |     """
12 | 
13 | 
14 | class NotConnectedToDatabase(Pyneo4jException):
15 |     """
16 |     A client tried to run a query without being connected to a database.
17 |     """
18 | 
19 |     def __init__(self, *args: object) -> None:
20 |         super().__init__("Client is not connected to a database", *args)
21 | 
22 | 
23 | class UnsupportedNeo4jVersion(Pyneo4jException):
24 |     """
25 |     A client tried to connect to a Neo4j database with an unsupported version (Neo4j 5+ is supported).
26 |     """
27 | 
28 |     def __init__(self, *args: object) -> None:
29 |         super().__init__("Only Neo4j 5+ is supported.", *args)
30 | 
31 | 
32 | class MissingDatabaseURI(Pyneo4jException):
33 |     """
34 |     A client tried to initialize without providing a connection URI.
35 |     """
36 | 
37 |     def __init__(self, *args: object) -> None:
38 |         super().__init__("Trying to initialize client without providing connection URI", *args)
39 | 
40 | 
41 | class InvalidEntityType(Pyneo4jException):
42 |     """
43 |     An invalid graph entity type was provided.
44 |     """
45 | 
46 |     def __init__(self, available_types: List[str], entity_type: str, *args: object) -> None:
47 |         super().__init__(
48 |             f"Invalid entity type. Expected entity type to be one of {available_types}, got {entity_type}",
49 |             *args,
50 |         )
51 | 
52 | 
53 | class InvalidRelationshipDirection(Pyneo4jException):
54 |     """
55 |     An invalid relationship direction was provided.
56 |     """
57 | 
58 |     def __init__(self, direction: str, *args: object) -> None:
59 |         super().__init__(
60 |             f"""Invalid relationship direction {direction} was provided. Expected one of
61 |             'INCOMING', 'OUTGOING' or 'BOTH'""",
62 |             *args,
63 |         )
64 | 
65 | 
66 | class InstanceNotHydrated(Pyneo4jException):
67 |     """
68 |     A model was used to run a query, but the instance was not hydrated.
69 | """ 70 | 71 | def __init__(self, *args: object) -> None: 72 | super().__init__("Queries can not be run on instances which have not been hydrated", *args) 73 | 74 | 75 | class InstanceDestroyed(Pyneo4jException): 76 | """ 77 | A model was used to run a query, but the instance was destroyed. 78 | """ 79 | 80 | def __init__(self, *args: object) -> None: 81 | super().__init__("Queries can not be run on instances which have been destroyed", *args) 82 | 83 | 84 | class UnexpectedEmptyResult(Pyneo4jException): 85 | """ 86 | A query should have returned results, but did not. This exception does not include a specific 87 | reason for why it failed, as it is not possible to determine the reason. 88 | """ 89 | 90 | def __init__(self, *args: object) -> None: 91 | super().__init__("The query did not match any results.", *args) 92 | 93 | 94 | class UnregisteredModel(Pyneo4jException): 95 | """ 96 | A model used another node- or relationship model within a query, but the model was not 97 | registered with the client. 98 | """ 99 | 100 | def __init__(self, model: str, *args: object) -> None: 101 | super().__init__( 102 | f"""Model {model} is not registered or is using other unregistered models. Please register all models 103 | before using them.""", 104 | *args, 105 | ) 106 | 107 | 108 | class InvalidTargetNode(Pyneo4jException): 109 | """ 110 | A relationship-property method was called on a node model, but the target node was not of the 111 | expected model type. 112 | """ 113 | 114 | def __init__(self, expected_type: str, actual_type: str, *args: object) -> None: 115 | super().__init__( 116 | f"Expected target node to be instance of subclass of {expected_type}, but got {actual_type}", 117 | *args, 118 | ) 119 | 120 | 121 | class InvalidLabelOrType(Pyneo4jException): 122 | """ 123 | Invalid node label or relationship type was provided. 124 | """ 125 | 126 | def __init__(self, *args: object) -> None: 127 | super().__init__("Invalid label or type", *args) 128 | 129 | 130 | class TransactionInProgress(Pyneo4jException): 131 | """ 132 | A client tried to start a transaction, but a transaction is already in progress. 133 | """ 134 | 135 | def __init__(self, *args: object) -> None: 136 | super().__init__("A transaction is already in progress.", *args) 137 | 138 | 139 | class NotConnectedToSourceNode(Pyneo4jException): 140 | """ 141 | A relationship-property method was called with a node which is not connected to the source node. 142 | """ 143 | 144 | def __init__(self, *args: object) -> None: 145 | super().__init__("Node not connected to source node.", *args) 146 | 147 | 148 | class InvalidFilters(Pyneo4jException): 149 | """ 150 | The method is missing filters or the filters are invalid. 151 | """ 152 | 153 | def __init__(self, *args: object) -> None: 154 | super().__init__( 155 | "Missing or invalid filters. Maybe you got a typo in the query operators?", 156 | *args, 157 | ) 158 | 159 | 160 | class InvalidRelationshipHops(Pyneo4jException): 161 | """ 162 | A multi-hop relationship query was attempted, but the hops were invalid. 163 | """ 164 | 165 | def __init__(self, *args: object) -> None: 166 | super().__init__("Invalid relationship hop. Hop must be positive integer or '*'.", *args) 167 | 168 | 169 | class CardinalityViolation(Pyneo4jException): 170 | """ 171 | A query would have violated a cardinality constraint. 
172 | """ 173 | 174 | def __init__( 175 | self, cardinality_type: str, relationship_type: str, start_model: str, end_model: str, *args: object 176 | ) -> None: 177 | super().__init__( 178 | f"""Cardinality {cardinality_type} for relationship {relationship_type} between {start_model} and 179 | {end_model} is being violated.""", 180 | *args, 181 | ) 182 | 183 | 184 | class NoResultFound(Pyneo4jException): 185 | """ 186 | A query with required filters did not match any results. 187 | """ 188 | 189 | def __init__(self, filters: Any, *args: object) -> None: 190 | super().__init__(f"No matching results for filter {filters}", *args) 191 | 192 | 193 | class InvalidBookmark(Pyneo4jException): 194 | """ 195 | A bookmark was used to start a transaction, but the bookmark was invalid. 196 | """ 197 | 198 | def __init__(self, bookmarks: Any, *args: object) -> None: 199 | super().__init__(f"Expected valid bookmarks, but received {bookmarks}", *args) 200 | 201 | 202 | class MigrationNotInitialized(Pyneo4jException): 203 | """ 204 | Migrations have not been initialized before usage. 205 | """ 206 | 207 | def __init__(self, *args: object) -> None: 208 | super().__init__("Migrations have not been initialized. Run `pyneo4j_ogm init` to initialize them", *args) 209 | 210 | 211 | class ListItemNotEncodable(Pyneo4jException): 212 | """ 213 | A list item is not JSON encodable and can thus not be stored. 214 | """ 215 | 216 | def __init__(self, *args: object) -> None: 217 | super().__init__("List item is not JSON encodable and can not be stored inside the database", *args) 218 | -------------------------------------------------------------------------------- /pyneo4j_ogm/fields/property_options.py: -------------------------------------------------------------------------------- 1 | """ 2 | Model property wrapper for defining indexes and constraints on properties. 3 | """ 4 | from typing import Any, Type 5 | 6 | from pyneo4j_ogm.pydantic_utils import IS_PYDANTIC_V2 7 | 8 | if IS_PYDANTIC_V2: 9 | from pydantic import GetCoreSchemaHandler 10 | from pydantic_core import CoreSchema 11 | 12 | 13 | def WithOptions( 14 | property_type: Type, 15 | range_index: bool = False, 16 | text_index: bool = False, 17 | point_index: bool = False, 18 | unique: bool = False, 19 | ): 20 | """ 21 | Returns a subclass of `property_type` and defines indexes and constraints on the property. 22 | 23 | Args: 24 | property_type (Type): The property type to return for the model field. 25 | range_index (bool, optional): Whether the property should have a `RANGE` index or not. Defaults to `False`. 26 | text_index (bool, optional): Whether the property should have a `TEXT` index or not. Defaults to `False`. 27 | point_index (bool, optional): Whether the property should have a `POINT` index or not. Defaults to `False`. 28 | unique (bool, optional): Whether a `UNIQUENESS` constraint should be created for the property. 29 | Defaults to `False`. 30 | 31 | Returns: 32 | A subclass of the provided type with extra attributes. 33 | """ 34 | 35 | class PropertyWithOptions(property_type): 36 | """ 37 | Subclass of provided type with extra arguments. 
38 | """ 39 | 40 | _range_index: bool = range_index 41 | _text_index: bool = text_index 42 | _point_index: bool = point_index 43 | _unique: bool = unique 44 | 45 | def __new__(cls, *args, **kwargs): 46 | return property_type.__new__(property_type, *args, **kwargs) 47 | 48 | if IS_PYDANTIC_V2: 49 | 50 | @classmethod 51 | def __get_pydantic_core_schema__(cls, _: Any, handler: GetCoreSchemaHandler) -> CoreSchema: # type: ignore 52 | return handler(property_type) # type: ignore 53 | 54 | return PropertyWithOptions 55 | -------------------------------------------------------------------------------- /pyneo4j_ogm/fields/settings.py: -------------------------------------------------------------------------------- 1 | """ 2 | Settings for model classes. 3 | """ 4 | 5 | from typing import Callable, Dict, List, Optional, Set, Union 6 | 7 | from pydantic import BaseModel 8 | from pydantic.class_validators import validator 9 | 10 | from pyneo4j_ogm.pydantic_utils import IS_PYDANTIC_V2 11 | 12 | if IS_PYDANTIC_V2: 13 | from pydantic import field_validator 14 | 15 | 16 | def _normalize_hooks(hooks: Dict[str, Union[List[Callable], Callable]]) -> Dict[str, List[Callable]]: 17 | """ 18 | Normalize a list of hooks to a list of callables. 19 | """ 20 | normalized_hooks: Dict[str, List[Callable]] = {} 21 | 22 | if isinstance(hooks, dict): 23 | for hook_name, hook_function in hooks.items(): 24 | if callable(hook_function): 25 | normalized_hooks[hook_name] = [hook_function] 26 | elif isinstance(hook_function, list): 27 | normalized_hooks[hook_name] = [func for func in hook_function if callable(func)] 28 | 29 | return normalized_hooks 30 | 31 | 32 | class BaseModelSettings(BaseModel): 33 | """ 34 | Shared settings for NodeModel and RelationshipModel classes or subclasses. 35 | """ 36 | 37 | pre_hooks: Dict[str, List[Callable]] = {} 38 | post_hooks: Dict[str, List[Callable]] = {} 39 | 40 | if IS_PYDANTIC_V2: 41 | normalize_pre_hooks = field_validator("pre_hooks", mode="before")(_normalize_hooks) 42 | normalize_post_hooks = field_validator("post_hooks", mode="before")(_normalize_hooks) 43 | else: 44 | normalize_pre_hooks = validator("pre_hooks", pre=True, allow_reuse=True)(_normalize_hooks) 45 | normalize_post_hooks = validator("post_hooks", pre=True, allow_reuse=True)(_normalize_hooks) 46 | 47 | if IS_PYDANTIC_V2: 48 | model_config = { 49 | "validate_assignment": True, 50 | } 51 | else: 52 | 53 | class Config: 54 | validate_assignment = True 55 | 56 | 57 | class NodeModelSettings(BaseModelSettings): 58 | """ 59 | Settings for a NodeModel class. 60 | """ 61 | 62 | labels: Set[str] = set() 63 | auto_fetch_nodes: Optional[bool] = None 64 | 65 | 66 | class RelationshipModelSettings(BaseModelSettings): 67 | """ 68 | Settings for a RelationshipModel class. 69 | """ 70 | 71 | type: Optional[str] = None 72 | -------------------------------------------------------------------------------- /pyneo4j_ogm/logger.py: -------------------------------------------------------------------------------- 1 | """ 2 | Logging module. 3 | 4 | Logging is controlled by two environment variables: 5 | - PYNEO4J_OGM_LOG_LEVEL: the log level to use. Defaults to `WARNING`. 6 | - PYNEO4J_OGM_ENABLE_LOGGING: whether to enable logging. Defaults to `True`. 
7 | """ 8 | import logging 9 | from os import environ 10 | 11 | enable_logging = environ.get("PYNEO4J_OGM_ENABLE_LOGGING", "True").lower() == "true" 12 | log_level = int(environ.get("PYNEO4J_OGM_LOG_LEVEL", logging.WARNING)) 13 | 14 | logger = logging.getLogger("pyneo4j-ogm") 15 | logger.setLevel(log_level) 16 | 17 | handler = logging.StreamHandler() 18 | handler.setLevel(log_level) 19 | 20 | formatter = logging.Formatter("[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s") 21 | handler.setFormatter(formatter) 22 | logger.addHandler(handler) 23 | 24 | if not enable_logging: 25 | logger.disabled = True 26 | -------------------------------------------------------------------------------- /pyneo4j_ogm/migrations/__init__.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=missing-module-docstring 2 | 3 | from .actions.create import create 4 | from .actions.down import down 5 | from .actions.init import init 6 | from .actions.status import status 7 | from .actions.up import up 8 | -------------------------------------------------------------------------------- /pyneo4j_ogm/migrations/actions/create.py: -------------------------------------------------------------------------------- 1 | """ 2 | Handles the creation of new migration files. 3 | """ 4 | 5 | import os 6 | import re 7 | from datetime import datetime 8 | from typing import Dict, Optional 9 | 10 | from pyneo4j_ogm.logger import logger 11 | from pyneo4j_ogm.migrations.utils.defaults import MIGRATION_TEMPLATE 12 | from pyneo4j_ogm.migrations.utils.migration import ( 13 | check_initialized, 14 | get_migration_config, 15 | ) 16 | 17 | 18 | def normalize_filename(name: str) -> str: 19 | """ 20 | Converts a file name to snake case. 21 | 22 | Args: 23 | name(str): String to convert 24 | 25 | Returns: 26 | str: Converted string 27 | """ 28 | converted = re.sub(r"(.)([A-Z][a-z]+)", r"\1_\2", name) 29 | converted = re.sub(r"([a-z0-9])([A-Z])", r"\1_\2", converted).lower() 30 | return re.sub(r"\W+", "_", converted) 31 | 32 | 33 | def create(name: str, config_path: Optional[str] = None) -> Dict[str, str]: 34 | """ 35 | Creates a new, empty migration file. 36 | 37 | Args: 38 | name(str): Name of the migration 39 | config_path(str, optional): Path to the migration config file. Defaults to None. 40 | """ 41 | check_initialized(config_path=config_path) 42 | 43 | logger.info("Creating new migration file") 44 | migration_timestamp = str(datetime.now().strftime("%Y%m%d%H%M%S")) 45 | config = get_migration_config(config_path=config_path) 46 | 47 | logger.debug("Generating migration file name") 48 | filename = f"{migration_timestamp}-{normalize_filename(name)}.py" 49 | filepath = os.path.join(config.migration_dir, filename) 50 | 51 | logger.debug("Writing migration file") 52 | with open(filepath, "w", encoding="utf-8") as f: 53 | f.write(MIGRATION_TEMPLATE.format(name=filename)) 54 | 55 | logger.info("Created new migration file %s at %s", filename, filepath) 56 | return {"name": filename, "path": filepath} 57 | -------------------------------------------------------------------------------- /pyneo4j_ogm/migrations/actions/down.py: -------------------------------------------------------------------------------- 1 | """ 2 | Reverts the defined number of migrations in correct order. 
3 | """ 4 | 5 | from copy import deepcopy 6 | from datetime import datetime 7 | from typing import Optional 8 | 9 | from pyneo4j_ogm.logger import logger 10 | from pyneo4j_ogm.migrations.utils.client import MigrationClient 11 | from pyneo4j_ogm.migrations.utils.migration import ( 12 | RunMigrationCount, 13 | check_initialized, 14 | get_migration_config, 15 | get_migration_files, 16 | ) 17 | 18 | 19 | async def down(down_count: RunMigrationCount = "all", config_path: Optional[str] = None) -> None: 20 | """ 21 | Reverts the defined number of migrations in correct order. 22 | 23 | Args: 24 | down_count(int, optional): Number of migrations to revert. Can be "all" to revert all migrations. 25 | Defaults to "all". 26 | config_path(str, optional): Path to the migration config file. Defaults to None. 27 | """ 28 | check_initialized(config_path=config_path) 29 | config = get_migration_config(config_path) 30 | 31 | logger.info("Rolling back %s migrations", down_count) 32 | async with MigrationClient(config) as migration_client: 33 | migration_files = get_migration_files(config.migration_dir) 34 | migration_node = await migration_client.get_migration_node() 35 | 36 | logger.debug("Filtering migration files for applied migrations") 37 | applied_migration_identifiers = migration_node.get_applied_migration_identifiers 38 | # Remove all migration files that have not been applied 39 | for identifier in deepcopy(migration_files).keys(): 40 | if identifier not in applied_migration_identifiers: 41 | migration_files.pop(identifier, None) 42 | 43 | for count, _ in enumerate(deepcopy(migration_files).values()): 44 | if down_count != "all" and count >= down_count: 45 | break 46 | 47 | # We can get the current migration by getting the max identifier, which is a 48 | # UNIX timestamp meaning the highest value is the most recent migration 49 | current_migration_identifier = max(migration_files.keys()) 50 | current_migration = migration_files[current_migration_identifier] 51 | 52 | logger.debug("Rolling back migration %s", current_migration["name"]) 53 | await current_migration["down"](migration_client.client) 54 | migration_files.pop(current_migration_identifier) 55 | migration_node.applied_migrations = [ 56 | migration 57 | for migration in migration_node.applied_migrations 58 | if migration.name != current_migration["name"] 59 | ] 60 | 61 | migration_node.updated_at = ( 62 | migration_node.applied_migrations[-1].applied_at 63 | if len(migration_node.applied_migrations) > 0 64 | else datetime.timestamp(datetime.now()) 65 | ) 66 | await migration_node.update() 67 | -------------------------------------------------------------------------------- /pyneo4j_ogm/migrations/actions/init.py: -------------------------------------------------------------------------------- 1 | """ 2 | Handles the initialization of the migrations directory. 3 | """ 4 | 5 | import json 6 | import os 7 | 8 | from pyneo4j_ogm.logger import logger 9 | from pyneo4j_ogm.migrations.utils.defaults import ( 10 | DEFAULT_CONFIG_FILENAME, 11 | DEFAULT_CONFIG_URI, 12 | DEFAULT_MIGRATION_DIR, 13 | ) 14 | from pyneo4j_ogm.migrations.utils.models import MigrationConfig, Neo4jDatabaseConfig 15 | from pyneo4j_ogm.pydantic_utils import get_model_dump_json 16 | 17 | 18 | def init(migration_dir: str = DEFAULT_MIGRATION_DIR, uri: str = DEFAULT_CONFIG_URI) -> None: 19 | """ 20 | Initializes the migrations directory. 21 | 22 | Args: 23 | migration_dir(str): Path to the migrations directory. Defaults to "migrations". 24 | uri(str): Neo4j database URI. 
Defaults to "bolt://localhost:7687". 25 | """ 26 | logger.info("Initializing migrations directory") 27 | 28 | config: MigrationConfig 29 | root = os.getcwd() 30 | 31 | # Check if a config file already exists, if so use that, otherwise create a new one 32 | if not os.path.exists(path=os.path.join(root, DEFAULT_CONFIG_FILENAME)): 33 | logger.debug("No config file found. Creating default config file.") 34 | config = MigrationConfig(migration_dir=migration_dir, neo4j=Neo4jDatabaseConfig(uri=uri)) 35 | 36 | with open(os.path.join(root, DEFAULT_CONFIG_FILENAME), "w", encoding="utf-8") as f: 37 | f.write(get_model_dump_json(config, exclude_none=True, indent=2)) 38 | else: 39 | logger.debug("Config file found. Loading config.") 40 | with open(os.path.join(root, DEFAULT_CONFIG_FILENAME), "r", encoding="utf-8") as f: 41 | config = MigrationConfig(**json.load(f)) 42 | 43 | if not os.path.exists(config.migration_dir): 44 | logger.debug("Creating migration directory") 45 | os.makedirs(config.migration_dir, exist_ok=True) 46 | 47 | # Add .gitignore file so the setup does not get omitted of no migration is cerated initially 48 | logger.debug("Creating .gitkeep file") 49 | with open(os.path.join(config.migration_dir, ".gitkeep"), "w", encoding="utf-8") as f: 50 | f.write("") 51 | 52 | logger.info("Initialized migrations directory at %s", os.path.abspath(config.migration_dir)) 53 | -------------------------------------------------------------------------------- /pyneo4j_ogm/migrations/actions/status.py: -------------------------------------------------------------------------------- 1 | """ 2 | Shows which migrations have been run or are pending. 3 | """ 4 | 5 | from datetime import datetime 6 | from typing import List, Optional, TypedDict 7 | 8 | from typing_extensions import Literal 9 | 10 | from pyneo4j_ogm.logger import logger 11 | from pyneo4j_ogm.migrations.utils.client import MigrationClient 12 | from pyneo4j_ogm.migrations.utils.migration import ( 13 | check_initialized, 14 | get_migration_config, 15 | get_migration_files, 16 | ) 17 | 18 | 19 | class MigrationState(TypedDict): 20 | name: str 21 | applied_at: Optional[str] 22 | 23 | 24 | class MigrationStatus(TypedDict): 25 | name: str 26 | applied_at: Optional[float] 27 | status: Literal["APPLIED", "PENDING"] 28 | 29 | 30 | def pretty_print(migrations: List[List[str]]) -> None: 31 | """ 32 | Prints a pretty version of the migration status. 33 | 34 | migrations(List[List[str]]): A list of migrations where the first item is the migration 35 | name and the second item is the status. 36 | """ 37 | max_length = max(len(str(item[0])) for item in migrations) 38 | top_border_line = "┌" + "─" * (max_length + 2) + "┬" + "─" * 26 + "┐" 39 | bottom_border_line = "└" + "─" * (max_length + 2) + "┴" + "─" * 26 + "┘" 40 | header_line = "│ " + "Migration".ljust(max_length) + " │ " + "Applied At".ljust(24) + " │" 41 | separator_line = "├" + "─" * (max_length + 2) + "┼" + "─" * 26 + "┤" 42 | 43 | print(top_border_line) 44 | print(header_line) 45 | print(separator_line) 46 | 47 | for migration in migrations: 48 | row = "│ " + str(migration[0]).ljust(max_length) + " │ " + str(migration[1]).ljust(24) + " │" 49 | print(row) 50 | 51 | print(bottom_border_line) 52 | 53 | 54 | async def status(config_path: Optional[str] = None) -> List[MigrationStatus]: 55 | """ 56 | Visualize the status of all migrations. 57 | 58 | Args: 59 | config_path(str, optional): Path to the migration config file. Defaults to None. 
60 | """ 61 | check_initialized(config_path=config_path) 62 | 63 | logger.info("Checking status for migrations") 64 | migrations: List[List[str]] = [] 65 | migration_status: List[MigrationStatus] = [] 66 | config = get_migration_config(config_path) 67 | 68 | async with MigrationClient(config) as client: 69 | migration_files = get_migration_files(config.migration_dir) 70 | migration_node = await client.get_migration_node() 71 | 72 | logger.debug("Building migration state") 73 | for _, migration_file in migration_files.items(): 74 | if migration_node is None: 75 | migrations.append([migration_file["name"], "PENDING"]) 76 | migration_status.append({"name": migration_file["name"], "applied_at": None, "status": "PENDING"}) 77 | else: 78 | migration = next( 79 | ( 80 | applied_migration 81 | for applied_migration in migration_node.applied_migrations 82 | if applied_migration.name == migration_file["name"] 83 | ), 84 | None, 85 | ) 86 | 87 | if migration is None: 88 | migrations.append([migration_file["name"], "PENDING"]) 89 | migration_status.append({"name": migration_file["name"], "applied_at": None, "status": "PENDING"}) 90 | else: 91 | migrations.append( 92 | [ 93 | migration_file["name"], 94 | datetime.fromtimestamp(migration.applied_at).strftime("%Y-%m-%d %H:%M:%S"), 95 | ] 96 | ) 97 | migration_status.append( 98 | { 99 | "name": migration_file["name"], 100 | "applied_at": migration.applied_at, 101 | "status": "APPLIED", 102 | } 103 | ) 104 | 105 | logger.debug("Sorting %s migrations by applied_at timestamp and name", len(migrations)) 106 | migrations.sort(key=lambda migration: (migration[1], migration[0])) 107 | pretty_print(migrations) 108 | 109 | migration_status.sort( 110 | key=lambda migration: (migration["applied_at"] is None, migration["applied_at"], migration["name"]) 111 | ) 112 | return migration_status 113 | -------------------------------------------------------------------------------- /pyneo4j_ogm/migrations/actions/up.py: -------------------------------------------------------------------------------- 1 | """ 2 | Applies the defined number of migrations in correct order. 3 | """ 4 | 5 | from copy import deepcopy 6 | from typing import Optional 7 | 8 | from pyneo4j_ogm.logger import logger 9 | from pyneo4j_ogm.migrations.utils.client import MigrationClient 10 | from pyneo4j_ogm.migrations.utils.migration import ( 11 | RunMigrationCount, 12 | check_initialized, 13 | get_migration_config, 14 | get_migration_files, 15 | ) 16 | from pyneo4j_ogm.migrations.utils.models import AppliedMigration 17 | 18 | 19 | async def up(up_count: RunMigrationCount = "all", config_path: Optional[str] = None) -> None: 20 | """ 21 | Applies the defined number of migrations in correct order. 22 | 23 | Args: 24 | up_count(int, optional): Number of migrations to apply. Can be "all" to apply all migrations. 25 | Defaults to "all". 26 | config_path(str, optional): Path to the migration config file. Defaults to None. 
27 | """ 28 | check_initialized(config_path=config_path) 29 | config = get_migration_config(config_path) 30 | 31 | logger.info("Applying next %s migrations", up_count) 32 | async with MigrationClient(config) as migration_client: 33 | migration_files = get_migration_files(config.migration_dir) 34 | migration_node = await migration_client.get_migration_node() 35 | 36 | logger.debug("Filtering migration files for unapplied migrations") 37 | for applied_migration in migration_node.get_applied_migration_identifiers: 38 | migration_files.pop(applied_migration, None) 39 | 40 | for count, _ in enumerate(deepcopy(migration_files).values()): 41 | if up_count != "all" and count >= up_count: 42 | break 43 | 44 | # Since the migration files are sorted by identifier, we can get the current migration 45 | # by getting the min identifier, which is a UNIX timestamp meaning the lowest value is the oldest migration 46 | current_migration_identifier = min(migration_files.keys()) 47 | current_migration = migration_files[current_migration_identifier] 48 | 49 | logger.debug("Applying migration %s", current_migration["name"]) 50 | await current_migration["up"](migration_client.client) 51 | migration_files.pop(current_migration_identifier) 52 | migration_node.applied_migrations.append(AppliedMigration(name=current_migration["name"])) 53 | 54 | migration_node.updated_at = migration_node.applied_migrations[-1].applied_at 55 | await migration_node.update() 56 | -------------------------------------------------------------------------------- /pyneo4j_ogm/migrations/cli.py: -------------------------------------------------------------------------------- 1 | """ 2 | Entry point for the CLI. It parses the arguments and calls the corresponding function. 3 | """ 4 | 5 | import asyncio 6 | import sys 7 | from argparse import ArgumentParser, ArgumentTypeError 8 | from asyncio import iscoroutinefunction 9 | from typing import Any 10 | 11 | from pyneo4j_ogm.logger import logger 12 | from pyneo4j_ogm.migrations import create, down, init, status, up 13 | from pyneo4j_ogm.migrations.utils.migration import RunMigrationCount 14 | 15 | IGNORED_KEYS = ["command", "func"] 16 | 17 | 18 | def parse_migration_count(arg: Any) -> RunMigrationCount: 19 | if arg == "all": 20 | return arg 21 | else: 22 | try: 23 | count = int(arg) 24 | if count < 1: 25 | raise ValueError("Migration count must be greater than 0") 26 | 27 | return count 28 | except ValueError as exc: 29 | raise ArgumentTypeError("Migration count must be an integer or 'all'") from exc 30 | 31 | 32 | def cli() -> None: 33 | """ 34 | Function that parses the CLI arguments and calls the corresponding function. 
35 | """ 36 | parser = ArgumentParser(prog="pyneo4j_ogm", description="Migration CLI pyneo4j-ogm models") 37 | subparsers = parser.add_subparsers(dest="command", title="Commands", metavar="") 38 | 39 | # Parser for `ìnit` command 40 | init_parser = subparsers.add_parser("init", help="Initialize migrations for this project") 41 | init_parser.add_argument( 42 | "--migration-dir", 43 | help="Path to the directory where the migrations will be stored", 44 | dest="migration_dir", 45 | required=False, 46 | ) 47 | init_parser.add_argument( 48 | "--uri", 49 | help="URI used to connect to the database", 50 | required=False, 51 | ) 52 | init_parser.set_defaults(func=init) 53 | 54 | # Parser for `create` command 55 | create_parser = subparsers.add_parser("create", help="Creates a new migration file") 56 | create_parser.add_argument("name", help="Name of the migration") 57 | create_parser.add_argument("-c", "--config", help="Path to a config file", dest="config_path", required=False) 58 | create_parser.set_defaults(func=create) 59 | 60 | # Parser for `up` command 61 | up_parser = subparsers.add_parser("up", help="Applies the defined number of migrations") 62 | up_parser.add_argument("-c", "--config", help="Path to a config file", dest="config_path", required=False) 63 | up_parser.add_argument( 64 | "--up-count", 65 | help="Number of migrations to apply. Can either be a integer or 'all'. Omit to apply all pending migrations", 66 | type=parse_migration_count, 67 | dest="up_count", 68 | required=False, 69 | ) 70 | up_parser.set_defaults(func=up) 71 | 72 | # Parser for `down` command 73 | down_parser = subparsers.add_parser("down", help="Rollbacks the defined number of migrations") 74 | down_parser.add_argument("-c", "--config", help="Path to a config file", dest="config_path", required=False) 75 | down_parser.add_argument( 76 | "--down-count", 77 | dest="down_count", 78 | help="""Number of migrations to rollback. Can either be a integer or 'all'. 79 | If omitted, rolls back the last migration""", 80 | type=parse_migration_count, 81 | required=False, 82 | ) 83 | down_parser.set_defaults(func=down) 84 | 85 | # Parser for `status` command 86 | status_parser = subparsers.add_parser("status", help="Shows the status of all migrations") 87 | status_parser.add_argument("-c", "--config", help="Path to a config file", dest="config_path", required=False) 88 | status_parser.set_defaults(func=status) 89 | 90 | args = parser.parse_args() 91 | 92 | if args.command: 93 | arguments = {key: value for key, value in vars(args).items() if key not in IGNORED_KEYS and value is not None} 94 | 95 | try: 96 | if iscoroutinefunction(args.func): 97 | asyncio.run(args.func(**arguments)) 98 | else: 99 | args.func(**arguments) 100 | except Exception as exc: 101 | logger.error("%s failed: %s", args.func.__name__, exc) 102 | sys.exit() 103 | else: 104 | parser.print_help() 105 | -------------------------------------------------------------------------------- /pyneo4j_ogm/migrations/utils/client.py: -------------------------------------------------------------------------------- 1 | """ 2 | Utility for Pyneo4jClient used in migrations. 
3 | """ 4 | 5 | from typing import Any, Dict, Optional, cast 6 | 7 | from neo4j import Auth, basic_auth, bearer_auth, custom_auth, kerberos_auth 8 | 9 | from pyneo4j_ogm.core.client import Pyneo4jClient 10 | from pyneo4j_ogm.migrations.utils.models import Migration, MigrationConfig 11 | from pyneo4j_ogm.pydantic_utils import get_model_dump 12 | from pyneo4j_ogm.queries.types import QueryOptionsOrder 13 | 14 | 15 | class MigrationClient: 16 | """ 17 | Utility for Pyneo4jClient used in migrations. 18 | """ 19 | 20 | config: MigrationConfig 21 | client: Pyneo4jClient 22 | 23 | def __init__(self, config: MigrationConfig) -> None: 24 | self.client = Pyneo4jClient() 25 | self.config = config 26 | 27 | async def __aenter__(self): 28 | auth: Optional[Auth] = None 29 | Migration._settings.labels = set(self.config.neo4j.node_labels) 30 | 31 | if self.config.neo4j.options is not None and self.config.neo4j.options.scheme is not None: 32 | match self.config.neo4j.options.scheme: 33 | case "basic": 34 | auth = basic_auth( 35 | user=cast(Dict[str, Any], self.config.neo4j.options.auth)["username"], 36 | password=cast(Dict[str, Any], self.config.neo4j.options.auth)["password"], 37 | ) 38 | case "kerberos": 39 | auth = kerberos_auth( 40 | base64_encoded_ticket=cast(Dict[str, Any], self.config.neo4j.options.auth)[ 41 | "base64_encoded_ticket" 42 | ] 43 | ) 44 | case "bearer": 45 | auth = bearer_auth( 46 | base64_encoded_token=cast(Dict[str, Any], self.config.neo4j.options.auth)[ 47 | "base64_encoded_token" 48 | ] 49 | ) 50 | case _: 51 | auth = custom_auth( 52 | principal=cast(Dict[str, Any], self.config.neo4j.options.auth)["principal"], 53 | credentials=cast(Dict[str, Any], self.config.neo4j.options.auth)["credentials"], 54 | realm=cast(Dict[str, Any], self.config.neo4j.options.auth)["realm"], 55 | scheme=cast(Dict[str, Any], self.config.neo4j.options.auth)["scheme"], 56 | **( 57 | cast(Dict[str, Any], self.config.neo4j.options.auth)["parameters"] 58 | if "parameters" in cast(Dict[str, Any], self.config.neo4j.options.auth) 59 | else {} 60 | ), 61 | ) 62 | 63 | await self.client.connect( 64 | uri=self.config.neo4j.uri, 65 | auth=auth, 66 | **( 67 | get_model_dump(self.config.neo4j.options, exclude={"scheme", "auth"}) 68 | if self.config.neo4j.options is not None 69 | else {} 70 | ), 71 | ) 72 | await self.client.register_models([Migration]) 73 | 74 | return self 75 | 76 | async def __aexit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None: 77 | await self.client.close() 78 | 79 | async def get_migration_node(self) -> Migration: 80 | """ 81 | Get the migration node from the database. 82 | 83 | Returns: 84 | Migration: If a migration node exists, it will be returned. Otherwise, a 85 | new migration node will be created and returned. 86 | """ 87 | migration = await Migration.find_many( 88 | options={"limit": 1, "sort": "updated_at", "order": QueryOptionsOrder.DESCENDING} 89 | ) 90 | 91 | if len(migration) > 0 and isinstance(migration[0], Migration): 92 | return migration[0] 93 | 94 | migration = Migration() 95 | await migration.create() 96 | 97 | return migration 98 | -------------------------------------------------------------------------------- /pyneo4j_ogm/migrations/utils/defaults.py: -------------------------------------------------------------------------------- 1 | """ 2 | Constants for default values. 
3 | """ 4 | 5 | from typing import List 6 | 7 | DEFAULT_CONFIG_URI = "bolt://localhost:7687" 8 | DEFAULT_MIGRATION_DIR = "migrations" 9 | DEFAULT_CONFIG_FILENAME: str = "migration-config.json" 10 | DEFAULT_CONFIG_LABELS: List[str] = ["migration"] 11 | MIGRATION_TEMPLATE = '''""" 12 | Auto-generated migration file {name}. Do not 13 | rename this file or the `up` and `down` functions. 14 | """ 15 | from pyneo4j_ogm import Pyneo4jClient 16 | 17 | 18 | async def up(client: Pyneo4jClient) -> None: 19 | """ 20 | Write your `UP migration` here. 21 | """ 22 | pass 23 | 24 | 25 | async def down(client: Pyneo4jClient) -> None: 26 | """ 27 | Write your `DOWN migration` here. 28 | """ 29 | pass 30 | ''' 31 | -------------------------------------------------------------------------------- /pyneo4j_ogm/migrations/utils/migration.py: -------------------------------------------------------------------------------- 1 | """ 2 | Utilities for checking if the migrations directory has been initialized. 3 | """ 4 | 5 | import importlib.util 6 | import json 7 | import os 8 | from typing import Callable, Dict, Optional, TypedDict, Union 9 | 10 | from typing_extensions import Literal 11 | 12 | from pyneo4j_ogm.exceptions import MigrationNotInitialized 13 | from pyneo4j_ogm.logger import logger 14 | from pyneo4j_ogm.migrations.utils.defaults import DEFAULT_CONFIG_FILENAME 15 | from pyneo4j_ogm.migrations.utils.models import MigrationConfig 16 | 17 | RunMigrationCount = Union[int, Literal["all"]] 18 | MigrationFile = TypedDict("MigrationFile", {"up": Callable, "down": Callable, "name": str}) 19 | 20 | 21 | def check_initialized(config_path: Optional[str]) -> None: 22 | """ 23 | Checks if the migrations directory has been initialized. 24 | 25 | Args: 26 | config_path(str, optional): Path to the migration config file. Defaults to None. 27 | """ 28 | logger.debug("Checking if migrations directory and config have been initialized") 29 | if config_path is not None: 30 | if not os.path.exists(config_path): 31 | raise MigrationNotInitialized 32 | else: 33 | if not os.path.exists(DEFAULT_CONFIG_FILENAME): 34 | raise MigrationNotInitialized 35 | 36 | 37 | def get_migration_files(directory: str) -> Dict[str, MigrationFile]: 38 | """ 39 | Returns all migration files in the given directory. 40 | 41 | Args: 42 | directory(str): Directory to search 43 | 44 | Returns: 45 | Dict[str, MigrationFile]: Dictionary of migration files 46 | """ 47 | migrations: Dict[str, MigrationFile] = {} 48 | 49 | for root, _, files in os.walk(directory): 50 | for file in files: 51 | if file.endswith(".py"): 52 | filepath = os.path.join(root, file) 53 | 54 | logger.debug("Found migration file %s", filepath) 55 | module_name = os.path.splitext(os.path.basename(filepath))[0] 56 | module_timestamp = module_name.split("-")[0] 57 | spec = importlib.util.spec_from_file_location(module_name, filepath) 58 | 59 | if spec is None or spec.loader is None: 60 | raise ImportError(f"Could not import migration file {filepath}") 61 | 62 | module = importlib.util.module_from_spec(spec) 63 | spec.loader.exec_module(module) 64 | 65 | logger.debug("Adding migration %s to list", module_name) 66 | migrations[module_timestamp] = { 67 | "name": module_name, 68 | "up": getattr(module, "up"), 69 | "down": getattr(module, "down"), 70 | } 71 | 72 | return migrations 73 | 74 | 75 | def get_migration_config(config_path: Optional[str]) -> MigrationConfig: 76 | """ 77 | Returns the migration configuration. 
78 | 79 | Args: 80 | config_path(str, optional): Path to the migration config file. 81 | 82 | Raises: 83 | MigrationNotInitialized: If the migration directory has not been initialized. 84 | 85 | Returns: 86 | MigrationConfig: Migration configuration 87 | """ 88 | logger.debug("Attempting to load migration config") 89 | 90 | if config_path is None: 91 | config_path = os.path.join(os.getcwd(), DEFAULT_CONFIG_FILENAME) 92 | else: 93 | config_path = os.path.abspath(config_path) 94 | 95 | if not os.path.exists(config_path): 96 | raise MigrationNotInitialized 97 | 98 | logger.debug("Loading migration config from %s", config_path) 99 | with open(config_path, "r", encoding="utf-8") as f: 100 | return MigrationConfig(**json.load(f)) 101 | -------------------------------------------------------------------------------- /pyneo4j_ogm/migrations/utils/models.py: -------------------------------------------------------------------------------- 1 | """ 2 | Pydantic validation models for configuration file and migration node. 3 | """ 4 | 5 | from datetime import datetime 6 | from typing import Any, Dict, List, Optional 7 | 8 | from pydantic import BaseModel, Field 9 | from typing_extensions import Literal 10 | 11 | from pyneo4j_ogm.core.node import NodeModel 12 | from pyneo4j_ogm.migrations.utils.defaults import DEFAULT_CONFIG_LABELS 13 | from pyneo4j_ogm.pydantic_utils import IS_PYDANTIC_V2 14 | 15 | if IS_PYDANTIC_V2: 16 | from pydantic import model_validator 17 | else: 18 | from pydantic import root_validator 19 | 20 | Scheme = Literal["basic", "kerberos", "bearer", "custom"] 21 | AuthKeys = Literal[ 22 | "username", 23 | "password", 24 | "base64_encoded_ticket", 25 | "base64_encoded_token", 26 | "principal", 27 | "credentials", 28 | "realm", 29 | "scheme", 30 | "parameters", 31 | ] 32 | 33 | 34 | class Neo4jDatabaseConfigOptions(BaseModel): 35 | """ 36 | Neo4j database options. All options accepted by the official Neo4j driver are allowed in addition to the 37 | defined ones. 
38 | """ 39 | 40 | scheme: Optional[Scheme] = Field(default=None) 41 | auth: Optional[ 42 | Dict[ 43 | AuthKeys, 44 | Any, 45 | ] 46 | ] = Field(default=None) 47 | 48 | if IS_PYDANTIC_V2: 49 | 50 | @model_validator(mode="after") # type: ignore 51 | def _validate_scheme_params(cls, values: "Neo4jDatabaseConfigOptions") -> Any: # type: ignore 52 | if values.scheme is not None: 53 | if values.auth is None: 54 | raise ValueError("Missing parameters for defined auth scheme") 55 | 56 | match values.scheme: 57 | case "basic": 58 | if "username" not in values.auth or "password" not in values.auth: 59 | raise ValueError("Basic scheme requires username and password") 60 | case "kerberos": 61 | if "base64_encoded_ticket" not in values.auth: 62 | raise ValueError("Kerberos scheme requires base64_encoded_ticket") 63 | case "bearer": 64 | if "base64_encoded_token" not in values.auth: 65 | raise ValueError("Bearer scheme requires base64_encoded_token") 66 | 67 | return values 68 | 69 | model_config = {"extra": "allow"} 70 | else: 71 | 72 | @root_validator # type: ignore 73 | def _validate_scheme_params(cls, values: Dict[str, Any]) -> Dict[str, Any]: 74 | if values["scheme"] is not None: 75 | if values["auth"] is None: 76 | raise ValueError("Missing parameters for defined auth scheme") 77 | 78 | match values["scheme"]: 79 | case "basic": 80 | if "username" not in values["auth"] or "password" not in values["auth"]: 81 | raise ValueError("Basic scheme requires username") 82 | case "kerberos": 83 | if "base64_encoded_ticket" not in values["auth"]: 84 | raise ValueError("Kerberos scheme requires base64_encoded_ticket") 85 | case "bearer": 86 | if "base64_encoded_token" not in values["auth"]: 87 | raise ValueError("Bearer scheme requires base64_encoded_token") 88 | 89 | return values 90 | 91 | class Config: 92 | extra = "allow" 93 | 94 | 95 | class Neo4jDatabaseConfig(BaseModel): 96 | """ 97 | Neo4j database configuration. 98 | """ 99 | 100 | uri: str 101 | options: Optional[Neo4jDatabaseConfigOptions] = Neo4jDatabaseConfigOptions() 102 | node_labels: List[str] = DEFAULT_CONFIG_LABELS 103 | 104 | 105 | class MigrationConfig(BaseModel): 106 | """ 107 | Migration configuration. Used to validate the migration config file. 108 | """ 109 | 110 | neo4j: Neo4jDatabaseConfig 111 | migration_dir: str 112 | 113 | 114 | class AppliedMigration(BaseModel): 115 | """ 116 | Log of applied migrations. 117 | """ 118 | 119 | name: str 120 | applied_at: float = Field(default_factory=lambda: datetime.timestamp(datetime.now())) 121 | 122 | 123 | class Migration(NodeModel): 124 | """ 125 | Migration node model. 126 | """ 127 | 128 | applied_migrations: List[AppliedMigration] = [] 129 | updated_at: Optional[float] = Field(default=None) 130 | 131 | @property 132 | def get_applied_migration_identifiers(self) -> List[str]: 133 | """ 134 | Returns: 135 | List[str]: Names of applied migrations 136 | """ 137 | applied_migrations: List[str] = [] 138 | 139 | for applied_migration in self.applied_migrations: 140 | identifier = applied_migration.name.split("-")[0] 141 | applied_migrations.append(identifier) 142 | 143 | return applied_migrations 144 | -------------------------------------------------------------------------------- /pyneo4j_ogm/pydantic_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Pydantic compatibility utility module. 
3 | """ 4 | 5 | from typing import Any, Type, Union 6 | 7 | import pydantic 8 | from pydantic import BaseModel 9 | 10 | IS_PYDANTIC_V2 = int(pydantic.VERSION.split(".", maxsplit=1)[0]) >= 2 11 | 12 | if IS_PYDANTIC_V2: 13 | from pydantic import TypeAdapter 14 | else: 15 | from pydantic import parse_obj_as 16 | 17 | 18 | def parse_object_as(object_type: Type, data: Any): 19 | if IS_PYDANTIC_V2: 20 | return TypeAdapter(object_type).validate_python(data) 21 | else: 22 | return parse_obj_as(object_type, data) 23 | 24 | 25 | def get_field_type(field): 26 | if IS_PYDANTIC_V2: 27 | return field.annotation 28 | else: 29 | return field.outer_type_ 30 | 31 | 32 | def get_model_fields(model): 33 | if IS_PYDANTIC_V2: 34 | return model.model_fields 35 | else: 36 | return model.__fields__ 37 | 38 | 39 | def parse_model(model_type, data: Any): 40 | if IS_PYDANTIC_V2: 41 | return model_type.model_validate(data) 42 | else: 43 | return model_type.parse_obj(data) 44 | 45 | 46 | def get_extra_field_info(field, parameter: str): 47 | if IS_PYDANTIC_V2: 48 | if field.json_schema_extra is not None: 49 | return field.json_schema_extra.get(parameter) 50 | return None 51 | else: 52 | return field.field_info.extra.get(parameter) 53 | 54 | 55 | def get_config_value(model, parameter: str): 56 | if IS_PYDANTIC_V2: 57 | return model.model_config.get(parameter) 58 | else: 59 | return getattr(model.Config, parameter, None) 60 | 61 | 62 | def get_model_dump(model: BaseModel, *args, **kwargs): 63 | if IS_PYDANTIC_V2: 64 | return model.model_dump(*args, **kwargs) 65 | else: 66 | return model.dict(*args, **kwargs) 67 | 68 | 69 | def get_model_dump_json(model: BaseModel, *args, **kwargs): 70 | if IS_PYDANTIC_V2: 71 | return model.model_dump_json(*args, **kwargs) 72 | else: 73 | return model.json(*args, **kwargs) 74 | 75 | 76 | def get_schema(model: Union[BaseModel, Type[BaseModel]], *args, **kwargs): 77 | if IS_PYDANTIC_V2: 78 | return model.model_json_schema(*args, **kwargs) 79 | else: 80 | return model.schema(*args, **kwargs) 81 | -------------------------------------------------------------------------------- /pyneo4j_ogm/queries/types.py: -------------------------------------------------------------------------------- 1 | """ 2 | Types used to describe queries. 3 | """ 4 | from enum import Enum 5 | from typing import Any, Dict, List, Literal, Optional, Union 6 | 7 | from typing_extensions import NotRequired, TypedDict 8 | 9 | 10 | class QueryOptionsOrder(str, Enum): 11 | """ 12 | Enum for ordering options in a query. 13 | """ 14 | 15 | ASCENDING = "ASC" 16 | DESCENDING = "DESC" 17 | 18 | 19 | class RelationshipMatchDirection(str, Enum): 20 | """ 21 | Enum for ordering options in a query. 22 | """ 23 | 24 | INCOMING = "INCOMING" 25 | OUTGOING = "OUTGOING" 26 | BOTH = "BOTH" 27 | 28 | 29 | NumericQueryDataType = Union[int, float] 30 | 31 | # We need to define 5 different typed dictionaries here because the `$size` operator can only be 32 | # one of the following, which means we have to create a Union of the five listed below to not get 33 | # any more type hints if one has already been used. 
34 | NumericEqualsOperator = TypedDict( 35 | "NumericEqualsOperator", 36 | { 37 | "$eq": NumericQueryDataType, 38 | }, 39 | ) 40 | 41 | NumericNotEqualsOperator = TypedDict( 42 | "NumericNotEqualsOperator", 43 | { 44 | "$neq": NumericQueryDataType, 45 | }, 46 | ) 47 | 48 | NumericGreaterThanOperator = TypedDict( 49 | "NumericGreaterThanOperator", 50 | { 51 | "$gt": NumericQueryDataType, 52 | }, 53 | ) 54 | 55 | NumericGreaterThanEqualsOperator = TypedDict( 56 | "NumericGreaterThanEqualsOperator", 57 | { 58 | "$gte": NumericQueryDataType, 59 | }, 60 | ) 61 | 62 | NumericLessThanOperator = TypedDict( 63 | "NumericLessThanOperator", 64 | { 65 | "$lt": NumericQueryDataType, 66 | }, 67 | ) 68 | 69 | NumericLessThanEqualsOperator = TypedDict( 70 | "NumericLessThanEqualsOperator", 71 | { 72 | "$lte": NumericQueryDataType, 73 | }, 74 | ) 75 | 76 | QueryOperators = TypedDict( 77 | "QueryOperators", 78 | { 79 | "$eq": Optional[Any], 80 | "$neq": Optional[Any], 81 | "$gt": Optional[NumericQueryDataType], 82 | "$gte": Optional[NumericQueryDataType], 83 | "$lt": Optional[NumericQueryDataType], 84 | "$lte": Optional[NumericQueryDataType], 85 | "$in": Optional[List[Any]], 86 | "$nin": Optional[List[Any]], 87 | "$all": Optional[List[Any]], 88 | "$size": Optional[ 89 | Union[ 90 | NumericEqualsOperator, 91 | NumericNotEqualsOperator, 92 | NumericGreaterThanOperator, 93 | NumericGreaterThanEqualsOperator, 94 | NumericLessThanOperator, 95 | NumericLessThanEqualsOperator, 96 | NumericQueryDataType, 97 | ] 98 | ], 99 | "$contains": Optional[str], 100 | "$exists": Optional[bool], 101 | "$icontains": Optional[str], 102 | "$startsWith": Optional[str], 103 | "$istartsWith": Optional[str], 104 | "$endsWith": Optional[str], 105 | "$iendsWith": Optional[str], 106 | "$regex": Optional[str], 107 | "$not": Optional["QueryOperators"], 108 | "$and": Optional[List["QueryOperators"]], 109 | "$or": Optional[List["QueryOperators"]], 110 | "$xor": Optional[List["QueryOperators"]], 111 | }, 112 | total=False, 113 | ) 114 | 115 | PatternNodeOperators = TypedDict( 116 | "PatternNodeOperators", 117 | {"$elementId": Optional[str], "$id": Optional[int], "$labels": Optional[List[str]]}, 118 | total=False, 119 | ) 120 | 121 | PatternRelationshipOperators = TypedDict( 122 | "PatternRelationshipOperators", 123 | {"$elementId": Optional[str], "$id": Optional[int], "$type": Optional[Union[str, List[str]]]}, 124 | total=False, 125 | ) 126 | 127 | PatternOperator = TypedDict( 128 | "PatternOperator", 129 | { 130 | "$exists": Optional[bool], 131 | "$direction": Optional[RelationshipMatchDirection], 132 | "$relationship": Optional[Union[Dict[str, Union[QueryOperators, Any]], PatternRelationshipOperators]], 133 | "$node": Optional[Union[Dict[str, Union[QueryOperators, Any]], PatternNodeOperators]], 134 | }, 135 | total=False, 136 | ) 137 | 138 | MultiHopRelationship = TypedDict( 139 | "MultiHopRelationship", {"$elementId": NotRequired[Optional[str]], "$id": NotRequired[Optional[int]], "$type": str} 140 | ) 141 | 142 | MultiHopNode = TypedDict( 143 | "MultiHopNode", 144 | {"$elementId": NotRequired[Optional[str]], "$id": NotRequired[Optional[int]], "$labels": Union[List[str], str]}, 145 | ) 146 | 147 | # We need to define different interfaces for nodes and relationships to not show invalid operants 148 | # for the model type. 
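# Hedged, illustrative example (not part of the original source): with the interfaces
# defined below, a node filter might look like
# {"$elementId": "4:abc:1", "flavor": {"$icontains": "latte"}}
# (the element id value is hypothetical), while a relationship filter only accepts
# `$elementId`/`$id` on top of the plain property operators.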
149 | QueryNodeOperators = TypedDict( 150 | "QueryNodeOperators", 151 | {"$elementId": Optional[str], "$id": Optional[int], "$patterns": Optional[List[PatternOperator]]}, 152 | total=False, 153 | ) 154 | 155 | QueryRelationshipOperators = TypedDict( 156 | "QueryRelationshipOperators", {"$elementId": Optional[str], "$id": Optional[int]}, total=False 157 | ) 158 | 159 | QueryRelationshipPropertyOperators = TypedDict( 160 | "QueryRelationshipPropertyOperators", 161 | { 162 | "$elementId": Optional[str], 163 | "$id": Optional[int], 164 | "$patterns": Optional[List[PatternOperator]], 165 | "$relationship": Optional[Union[Dict[str, Union[QueryOperators, Any]], QueryRelationshipOperators]], 166 | }, 167 | total=False, 168 | ) 169 | 170 | # The actual interfaces used to describe query filters 171 | NodeFilters = Union[Dict[str, Union[QueryOperators, Any, Any]], QueryNodeOperators] 172 | RelationshipFilters = Union[Dict[str, Union[QueryOperators, Any, Any]], QueryRelationshipOperators] 173 | RelationshipPropertyFilters = Union[Dict[str, Union[QueryOperators, Any, Any]], QueryRelationshipPropertyOperators] 174 | 175 | MultiHopFilters = TypedDict( 176 | "MultiHopFilters", 177 | { 178 | "$minHops": NotRequired[Optional[int]], 179 | "$maxHops": NotRequired[Optional[Union[int, Literal["*"]]]], 180 | "$node": Union[Dict[str, Union[QueryOperators, Any, Any]], MultiHopNode], 181 | "$relationships": NotRequired[ 182 | Optional[List[Union[Dict[str, Union[QueryOperators, Any, Any]], MultiHopRelationship]]] 183 | ], 184 | "$direction": NotRequired[Optional[RelationshipMatchDirection]], 185 | }, 186 | ) 187 | 188 | 189 | # Interface to describe query options 190 | class QueryOptions(TypedDict, total=False): 191 | """ 192 | Interface to describe query options. 193 | """ 194 | 195 | limit: Optional[int] 196 | skip: Optional[int] 197 | sort: Optional[Union[List[str], str]] 198 | order: Optional[QueryOptionsOrder] 199 | 200 | 201 | # Interface for a projection 202 | Projection = Dict[str, Union[str, Literal["$elementId"], Literal["$id"]]] 203 | -------------------------------------------------------------------------------- /pyneo4j_ogm/queries/validators.py: -------------------------------------------------------------------------------- 1 | """ 2 | Pydantic validators for query operators and filters. 3 | """ 4 | 5 | # pylint: disable=unused-argument 6 | 7 | from typing import Any, Dict, List, Literal, Optional, Type, Union 8 | 9 | from pydantic import BaseModel, Field, ValidationError 10 | 11 | from pyneo4j_ogm.logger import logger 12 | from pyneo4j_ogm.pydantic_utils import ( 13 | IS_PYDANTIC_V2, 14 | get_model_dump, 15 | get_model_fields, 16 | parse_model, 17 | ) 18 | from pyneo4j_ogm.queries.types import ( 19 | NumericQueryDataType, 20 | QueryOptionsOrder, 21 | RelationshipMatchDirection, 22 | ) 23 | 24 | if IS_PYDANTIC_V2: 25 | from pydantic import field_validator, model_validator 26 | else: 27 | from pydantic.class_validators import root_validator, validator 28 | 29 | 30 | def _normalize_fields(cls: Type[BaseModel], values: Any) -> Any: 31 | """ 32 | Normalizes and validates model property fields. 33 | 34 | Args: 35 | values (Any): The values to normalize and validate. 36 | 37 | Returns: 38 | Any: The normalized and validated values. 
39 | """ 40 | normalized_values: Dict[str, Any] = get_model_dump(values) if isinstance(values, BaseModel) else values 41 | validated_values: Dict[str, Any] = {} 42 | 43 | for property_name, property_value in normalized_values.items(): 44 | if property_name not in get_model_fields(cls).keys(): 45 | try: 46 | validated = parse_model(QueryOperatorModel, property_value) 47 | validated_value = get_model_dump( 48 | model=validated, by_alias=True, exclude_none=True, exclude_unset=True, exclude_defaults=True 49 | ) 50 | 51 | if len(validated_value.keys()) > 0: 52 | validated_values[property_name] = validated_value 53 | except ValidationError: 54 | logger.debug("Invalid field %s found, omitting field", property_name) 55 | else: 56 | validated_values[property_name] = property_value 57 | 58 | if IS_PYDANTIC_V2: 59 | return cls.model_construct(**validated_values) 60 | return validated_values 61 | 62 | 63 | def _normalize_labels(cls, value: Optional[Union[str, List[str]]]) -> Optional[List[str]]: 64 | """ 65 | Validator for `$labels` operator. If a string is passed, it will be converted to a list. 66 | 67 | Args: 68 | v (Optional[Union[str, List[str]]]): The value to validate. 69 | 70 | Returns: 71 | Optional[List[str]]: Validated value. 72 | """ 73 | if isinstance(value, str): 74 | return [value] 75 | return value 76 | 77 | 78 | def _normalize_sort(cls, value: Optional[Union[str, List[str]]]) -> Optional[List[str]]: 79 | """ 80 | Validator for `sort` option. If a string is passed, it will be converted to a list. 81 | 82 | Args: 83 | v (Optional[Union[str, List[str]]]): The value to validate. 84 | 85 | Returns: 86 | Optional[List[str]]: Validated value. 87 | """ 88 | if isinstance(value, str): 89 | return [value] 90 | return value 91 | 92 | 93 | class NumericEqualsOperatorModel(BaseModel): 94 | """ 95 | Validator for `$eq` operator in combined use with `$size` operator. 96 | """ 97 | 98 | eq_: Any = Field(alias="$eq") 99 | 100 | 101 | class NumericNotEqualsOperatorModel(BaseModel): 102 | """ 103 | Validator for `$neq` operator in combined use with `$size` operator. 104 | """ 105 | 106 | neq_: Any = Field(alias="$neq") 107 | 108 | 109 | class NumericGreaterThanOperatorModel(BaseModel): 110 | """ 111 | Validator for `$gt` operator in combined use with `$size` operator. 112 | """ 113 | 114 | gt_: Any = Field(alias="$gt") 115 | 116 | 117 | class NumericGreaterThanEqualsOperatorModel(BaseModel): 118 | """ 119 | Validator for `$gte` operator in combined use with `$size` operator. 120 | """ 121 | 122 | gte_: Any = Field(alias="$gte") 123 | 124 | 125 | class NumericLessThanOperatorModel(BaseModel): 126 | """ 127 | Validator for `$lt` operator in combined use with `$size` operator. 128 | """ 129 | 130 | lt_: Any = Field(alias="$lt") 131 | 132 | 133 | class NumericLessThanEqualsOperatorModel(BaseModel): 134 | """ 135 | Validator for `$lte` operator in combined use with `$size` operator. 136 | """ 137 | 138 | lte_: Any = Field(alias="$lte") 139 | 140 | 141 | class QueryOperatorModel(BaseModel): 142 | """ 143 | Validator for query operators defined in a property. 
144 | """ 145 | 146 | eq_: Optional[Any] = Field(alias="$eq", default=None) 147 | neq_: Optional[Any] = Field(alias="$neq", default=None) 148 | gt_: Optional[NumericQueryDataType] = Field(alias="$gt", default=None) 149 | gte_: Optional[NumericQueryDataType] = Field(alias="$gte", default=None) 150 | lt_: Optional[NumericQueryDataType] = Field(alias="$lt", default=None) 151 | lte_: Optional[NumericQueryDataType] = Field(alias="$lte", default=None) 152 | in__: Optional[List[Any]] = Field(alias="$in", default=None) 153 | nin_: Optional[List[Any]] = Field(alias="$nin", default=None) 154 | all_: Optional[List[Any]] = Field(alias="$all", default=None) 155 | size_: Optional[ 156 | Union[ 157 | NumericQueryDataType, 158 | NumericNotEqualsOperatorModel, 159 | NumericEqualsOperatorModel, 160 | NumericGreaterThanOperatorModel, 161 | NumericGreaterThanEqualsOperatorModel, 162 | NumericLessThanOperatorModel, 163 | NumericLessThanEqualsOperatorModel, 164 | ] 165 | ] = Field(alias="$size", default=None) 166 | contains_: Optional[str] = Field(alias="$contains", default=None) 167 | exists_: Optional[bool] = Field(alias="$exists", default=None) 168 | i_contains_: Optional[str] = Field(alias="$icontains", default=None) 169 | starts_with_: Optional[str] = Field(alias="$startsWith", default=None) 170 | i_starts_with_: Optional[str] = Field(alias="$istartsWith", default=None) 171 | ends_with_: Optional[str] = Field(alias="$endsWith", default=None) 172 | i_ends_with_: Optional[str] = Field(alias="$iendsWith", default=None) 173 | regex_: Optional[str] = Field(alias="$regex", default=None) 174 | not_: Optional["QueryOperatorModel"] = Field(alias="$not", default=None) 175 | and_: Optional[List["QueryOperatorModel"]] = Field(alias="$and", default=None) 176 | or_: Optional[List["QueryOperatorModel"]] = Field(alias="$or", default=None) 177 | xor_: Optional[List["QueryOperatorModel"]] = Field(alias="$xor", default=None) 178 | 179 | 180 | class NodeFiltersModel(BaseModel): 181 | """ 182 | Validator model for node filters. 183 | """ 184 | 185 | element_id_: Optional[str] = Field(alias="$elementId", default=None) 186 | id_: Optional[int] = Field(alias="$id", default=None) 187 | patterns_: Optional[List["PatternOperatorModel"]] = Field(alias="$patterns", default=None) 188 | 189 | if IS_PYDANTIC_V2: 190 | normalize_and_validate_fields = model_validator(mode="after")(_normalize_fields) 191 | 192 | model_config = { 193 | "extra": "allow", 194 | "use_enum_values": True, 195 | } 196 | else: 197 | normalize_and_validate_fields = root_validator(allow_reuse=True)(_normalize_fields) 198 | 199 | class Config: 200 | """ 201 | Pydantic configuration 202 | """ 203 | 204 | extra = "allow" 205 | use_enum_values = True 206 | 207 | 208 | class RelationshipFiltersModel(BaseModel): 209 | """ 210 | Validator model for relationship filters. 
211 | """ 212 | 213 | element_id_: Optional[str] = Field(alias="$elementId", default=None) 214 | id_: Optional[int] = Field(alias="$id", default=None) 215 | 216 | if IS_PYDANTIC_V2: 217 | normalize_and_validate_fields = model_validator(mode="after")(_normalize_fields) 218 | 219 | model_config = { 220 | "extra": "allow", 221 | "use_enum_values": True, 222 | } 223 | else: 224 | normalize_and_validate_fields = root_validator(allow_reuse=True)(_normalize_fields) 225 | 226 | class Config: 227 | """ 228 | Pydantic configuration 229 | """ 230 | 231 | extra = "allow" 232 | use_enum_values = True 233 | 234 | 235 | class RelationshipPropertyFiltersModel(BaseModel): 236 | """ 237 | Validator model for relationship filters. 238 | """ 239 | 240 | element_id_: Optional[str] = Field(alias="$elementId", default=None) 241 | id_: Optional[int] = Field(alias="$id", default=None) 242 | patterns_: Optional[List["PatternOperatorModel"]] = Field(alias="$patterns", default=None) 243 | relationship_: Optional[RelationshipFiltersModel] = Field(alias="$relationship", default=None) 244 | 245 | if IS_PYDANTIC_V2: 246 | normalize_and_validate_fields = model_validator(mode="after")(_normalize_fields) 247 | 248 | model_config = { 249 | "extra": "allow", 250 | "use_enum_values": True, 251 | } 252 | else: 253 | normalize_and_validate_fields = root_validator(allow_reuse=True)(_normalize_fields) 254 | 255 | class Config: 256 | """ 257 | Pydantic configuration 258 | """ 259 | 260 | extra = "allow" 261 | use_enum_values = True 262 | 263 | 264 | class PatternNodeOperatorsModel(BaseModel): 265 | """ 266 | Validator model for node pattern operators. 267 | """ 268 | 269 | element_id_: Optional[str] = Field(alias="$elementId", default=None) 270 | id_: Optional[int] = Field(alias="$id") 271 | labels_: Optional[Union[List[str], str]] = Field(alias="$labels", default=None) 272 | 273 | if IS_PYDANTIC_V2: 274 | normalize_and_validate_fields = model_validator(mode="after")(_normalize_fields) 275 | normalize_and_validate_labels = field_validator("labels_", mode="before")(_normalize_labels) 276 | 277 | model_config = { 278 | "extra": "allow", 279 | "use_enum_values": True, 280 | } 281 | else: 282 | normalize_and_validate_fields = root_validator(allow_reuse=True)(_normalize_fields) 283 | normalize_and_validate_labels = validator("labels_", pre=True)(_normalize_labels) 284 | 285 | class Config: 286 | """ 287 | Pydantic configuration 288 | """ 289 | 290 | extra = "allow" 291 | use_enum_values = True 292 | 293 | 294 | class PatternRelationshipOperatorsModel(NodeFiltersModel): 295 | """ 296 | Validator model for relationship pattern operators. 297 | """ 298 | 299 | element_id_: Optional[str] = Field(alias="$elementId", default=None) 300 | id_: Optional[int] = Field(alias="$id", default=None) 301 | type_: Optional[Union[str, List[str]]] = Field(alias="$type", default=None) 302 | 303 | if IS_PYDANTIC_V2: 304 | normalize_and_validate_fields = model_validator(mode="after")(_normalize_fields) 305 | 306 | model_config = { 307 | "extra": "allow", 308 | "use_enum_values": True, 309 | } 310 | else: 311 | normalize_and_validate_fields = root_validator(allow_reuse=True)(_normalize_fields) 312 | 313 | class Config: 314 | """ 315 | Pydantic configuration 316 | """ 317 | 318 | extra = "allow" 319 | use_enum_values = True 320 | 321 | 322 | class PatternOperatorModel(BaseModel): 323 | """ 324 | Validator for pattern operators defined in a property. 
325 | """ 326 | 327 | exists_: bool = Field(default=False, alias="$exists") 328 | direction_: RelationshipMatchDirection = Field(default=RelationshipMatchDirection.OUTGOING, alias="$direction") 329 | node_: Optional[PatternNodeOperatorsModel] = Field(alias="$node", default=None) 330 | relationship_: Optional[PatternRelationshipOperatorsModel] = Field(alias="$relationship", default=None) 331 | 332 | 333 | class MultiHopRelationshipOperatorsModel(NodeFiltersModel): 334 | """ 335 | Validator model for a relationship operator in a multi hop filter. 336 | """ 337 | 338 | element_id_: Optional[str] = Field(alias="$elementId", default=None) 339 | id_: Optional[int] = Field(alias="$id", default=None) 340 | type_: str = Field(alias="$type") 341 | 342 | if IS_PYDANTIC_V2: 343 | normalize_and_validate_fields = model_validator(mode="after")(_normalize_fields) 344 | 345 | model_config = { 346 | "extra": "allow", 347 | "use_enum_values": True, 348 | } 349 | else: 350 | normalize_and_validate_fields = root_validator(allow_reuse=True)(_normalize_fields) 351 | 352 | class Config: 353 | """ 354 | Pydantic configuration 355 | """ 356 | 357 | extra = "allow" 358 | use_enum_values = True 359 | 360 | 361 | class MultiHopNodeModel(BaseModel): 362 | """ 363 | Validator model for multi hop node operators. 364 | """ 365 | 366 | element_id_: Optional[str] = Field(alias="$elementId", default=None) 367 | id_: Optional[int] = Field(alias="$id", default=None) 368 | labels_: Union[List[str], str] = Field(alias="$labels") 369 | 370 | if IS_PYDANTIC_V2: 371 | normalize_and_validate_fields = model_validator(mode="after")(_normalize_fields) 372 | normalize_and_validate_labels = field_validator("labels_", mode="before")(_normalize_labels) 373 | 374 | model_config = { 375 | "extra": "allow", 376 | "use_enum_values": True, 377 | } 378 | else: 379 | normalize_and_validate_fields = root_validator(allow_reuse=True)(_normalize_fields) 380 | normalize_and_validate_labels = validator("labels_", pre=True, allow_reuse=True)(_normalize_labels) 381 | 382 | class Config: 383 | """ 384 | Pydantic configuration 385 | """ 386 | 387 | extra = "allow" 388 | use_enum_values = True 389 | 390 | 391 | class MultiHopFiltersModel(BaseModel): 392 | """ 393 | Validator model for node and relationship filters with multiple hops between the nodes. 
394 | """ 395 | 396 | min_hops_: Optional[int] = Field(alias="$minHops", ge=0, default=None) 397 | max_hops_: Optional[Union[int, Literal["*"]]] = Field(alias="$maxHops", default="*") 398 | node_: MultiHopNodeModel = Field(alias="$node") 399 | relationships_: Optional[List[MultiHopRelationshipOperatorsModel]] = Field(alias="$relationships", default=None) 400 | direction_: Optional[RelationshipMatchDirection] = Field( 401 | default=RelationshipMatchDirection.OUTGOING, alias="$direction" 402 | ) 403 | 404 | if IS_PYDANTIC_V2: 405 | normalize_and_validate_fields = model_validator(mode="after")(_normalize_fields) 406 | 407 | @field_validator("max_hops_") 408 | def validate_max_hops_v2(cls, v: Any) -> Any: 409 | if isinstance(v, int) and v <= 0: 410 | raise ValueError("$maxHops must be greater than 0") 411 | return v 412 | 413 | model_config = { 414 | "extra": "allow", 415 | "use_enum_values": True, 416 | } 417 | else: 418 | normalize_and_validate_fields = root_validator(allow_reuse=True)(_normalize_fields) 419 | 420 | @validator("max_hops_") 421 | def validate_max_hops_v1(cls, v: Any) -> Any: 422 | if isinstance(v, int) and v <= 0: 423 | raise ValueError("$maxHops must be greater than 0") 424 | return v 425 | 426 | class Config: 427 | """ 428 | Pydantic configuration 429 | """ 430 | 431 | extra = "allow" 432 | use_enum_values = True 433 | 434 | 435 | class QueryOptionModel(BaseModel): 436 | """ 437 | Validator model for query options. 438 | """ 439 | 440 | limit: Optional[int] = Field(default=None, gt=0) 441 | skip: Optional[int] = Field(default=None, ge=0) 442 | sort: Optional[Union[List[str], str]] = Field(default=None) 443 | order: Optional[QueryOptionsOrder] = Field(default=None) 444 | 445 | if IS_PYDANTIC_V2: 446 | normalize_list_validator = field_validator("sort", mode="before")(_normalize_sort) 447 | 448 | model_config = { 449 | "extra": "allow", 450 | "use_enum_values": True, 451 | } 452 | else: 453 | normalize_list_validator = validator("sort", pre=True)(_normalize_sort) 454 | 455 | class Config: 456 | """ 457 | Pydantic configuration 458 | """ 459 | 460 | extra = "allow" 461 | use_enum_values = True 462 | 463 | 464 | if IS_PYDANTIC_V2: 465 | NodeFiltersModel.model_rebuild() 466 | RelationshipFiltersModel.model_rebuild() 467 | RelationshipPropertyFiltersModel.model_rebuild() 468 | MultiHopFiltersModel.model_rebuild() 469 | else: 470 | NodeFiltersModel.update_forward_refs() 471 | RelationshipFiltersModel.update_forward_refs() 472 | RelationshipPropertyFiltersModel.update_forward_refs() 473 | MultiHopFiltersModel.update_forward_refs() 474 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "pyneo4j-ogm" 3 | version = "0.6.0" 4 | description = "Asynchronous Python OGM for Neo4j" 5 | authors = ["groc-prog "] 6 | maintainers = ["groc-prog "] 7 | readme = "README.md" 8 | license = "MIT" 9 | keywords = ["neo4j", "python", "orm", "ogm", "async", "asynchronous", "database", "graph-database", "pydantic"] 10 | homepage = "https://github.com/groc-prog/pyneo4j-ogm" 11 | repository = "https://github.com/groc-prog/pyneo4j-ogm" 12 | documentation = "https://github.com/groc-prog/pyneo4j-ogm#readme" 13 | classifiers = [ 14 | "Development Status :: 4 - Beta", 15 | "Intended Audience :: Developers", 16 | "Natural Language :: English", 17 | "Operating System :: OS Independent", 18 | "Topic :: Database", 19 | "Topic :: Software Development :: 
Libraries :: Python Modules", 20 | "Typing :: Typed", 21 | ] 22 | packages = [{ include = "pyneo4j_ogm" }] 23 | 24 | [tool.poetry.dependencies] 25 | python = "^3.10" 26 | pydantic = ">=1.10,<3.0" 27 | neo4j = "^5.9.0" 28 | typing-extensions = "^4.8.0" 29 | argparse = "^1.4.0" 30 | 31 | [tool.poetry.group.dev.dependencies] 32 | pylint = "^2.17.5" 33 | black = "^23.7.0" 34 | pytest = "^7.4.0" 35 | pre-commit = "^3.3.3" 36 | pytest-asyncio = "^0.21.1" 37 | pyright = "^1.1.325" 38 | isort = "^5.12.0" 39 | pytest-cov = "^4.1.0" 40 | 41 | [tool.poetry.scripts] 42 | pyneo4j_ogm = "pyneo4j_ogm.migrations.cli:cli" 43 | 44 | [tool.semantic_release] 45 | assets = [] 46 | build_command = "pip install poetry && poetry build" 47 | commit_message = "🎉 {version}\n\nSee the full changelog at https://github.com/groc-prog/pyneo4j-ogm/blob/main/CHANGELOG.md" 48 | commit_parser = "angular" 49 | logging_use_named_masks = false 50 | major_on_zero = true 51 | allow_zero_version = true 52 | tag_format = "v{version}" 53 | version_toml = ["pyproject.toml:tool.poetry.version"] 54 | 55 | [tool.semantic_release.branches.main] 56 | match = "(main|master|develop)" 57 | prerelease_token = "rc" 58 | prerelease = false 59 | 60 | [tool.semantic_release.changelog] 61 | template_dir = "templates" 62 | changelog_file = "CHANGELOG.md" 63 | exclude_commit_patterns = [] 64 | 65 | [tool.semantic_release.changelog.environment] 66 | block_start_string = "{%" 67 | block_end_string = "%}" 68 | variable_start_string = "{{" 69 | variable_end_string = "}}" 70 | comment_start_string = "{#" 71 | comment_end_string = "#}" 72 | trim_blocks = false 73 | lstrip_blocks = false 74 | newline_sequence = "\n" 75 | keep_trailing_newline = false 76 | extensions = [] 77 | autoescape = true 78 | 79 | [tool.semantic_release.commit_parser_options] 80 | allowed_tags = ["build", "chore", "ci", "docs", "feat", "fix", "perf", "style", "refactor", "test"] 81 | minor_tags = ["feat"] 82 | patch_tags = ["fix", "perf"] 83 | default_bump_level = 0 84 | 85 | [tool.semantic_release.remote] 86 | name = "origin" 87 | type = "github" 88 | ignore_token_for_push = false 89 | 90 | [tool.black] 91 | line-length = 120 92 | target-version = ["py310", "py311"] 93 | workers = 4 94 | 95 | [tool.isort] 96 | profile = "black" 97 | 98 | [build-system] 99 | requires = ["poetry-core"] 100 | build-backend = "poetry.core.masonry.api" 101 | -------------------------------------------------------------------------------- /pyrightconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "reportIncompatibleVariableOverride": false, 3 | "reportInvalidTypeForm": false, 4 | "reportIncompatibleMethodOverride": false, 5 | "reportUnboundVariable": false 6 | } 7 | -------------------------------------------------------------------------------- /templates/CHANGELOG.md.j2: -------------------------------------------------------------------------------- 1 | # CHANGELOG 2 | {% if context.history.unreleased | length > 0 %} 3 | 4 | {# UNRELEASED #} 5 | ## Unreleased 6 | {% for type_, commits in context.history.unreleased | dictsort %} 7 | {% if type_ in ["feature", "fix", "performance", "documentation", "refactor"] %} 8 | ### {{ type_ | capitalize }} 9 | {% for commit in commits %}{% if type_ in ["feature", "fix", "performance", "documentation", "refactor"] %} 10 | * {{ commit.commit.message.rstrip() }} ([`{{ commit.commit.hexsha[:7] }}`]({{ commit.commit.hexsha | commit_hash_url }})) 11 | {% endif %}{% endfor %}{% endif %}{% endfor %} 12 | 13 | {% endif %} 
14 | 15 | {# RELEASED #} 16 | {% for version, release in context.history.released.items() %} 17 | ## What's Changed in {{ version.as_tag() }} ({{ release.tagged_date.strftime("%Y-%m-%d") }}) 18 | {% for type_, commits in release["elements"] | dictsort %} 19 | {% if type_ in ["feature", "fix", "performance", "documentation", "refactor"] %} 20 | ### {{ type_ | capitalize }} 21 | {% for commit in commits %} 22 | {% if type_ in ["feature", "fix", "performance", "documentation", "refactor"] %} 23 | * {{ commit.commit.message.rstrip() }} ([`{{ commit.commit.hexsha[:7] }}`]({{ commit.commit.hexsha | commit_hash_url }})) 24 | {% endif %}{% endfor %}{% endif %}{% endfor %}{% endfor %} 25 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/groc-prog/pyneo4j-ogm/988b662416d5cace14e7c0d055432a6d5698918e/tests/__init__.py -------------------------------------------------------------------------------- /tests/core/test_base.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=unused-argument, unused-variable, unused-import, redefined-outer-name, protected-access, missing-module-docstring, missing-class-docstring 2 | # pyright: reportGeneralTypeIssues=false 3 | 4 | import json 5 | from typing import Union, cast 6 | from unittest.mock import AsyncMock, MagicMock 7 | 8 | import pytest 9 | 10 | from pyneo4j_ogm.core.base import ModelBase, hooks 11 | from pyneo4j_ogm.core.node import NodeModel 12 | from pyneo4j_ogm.core.relationship import RelationshipModel 13 | from pyneo4j_ogm.exceptions import ListItemNotEncodable, UnregisteredModel 14 | from pyneo4j_ogm.fields.settings import BaseModelSettings 15 | from pyneo4j_ogm.pydantic_utils import get_model_dump, get_model_dump_json 16 | from tests.fixtures.db_setup import Developer 17 | 18 | 19 | def hook_func(): 20 | pass 21 | 22 | 23 | def test_pre_hooks(): 24 | Developer.register_pre_hooks("test_hook", lambda: None) 25 | assert len(Developer._settings.pre_hooks["test_hook"]) == 1 26 | assert all(callable(func) for func in Developer._settings.pre_hooks["test_hook"]) 27 | Developer._settings.pre_hooks["test_hook"] = [] 28 | 29 | Developer.register_pre_hooks("test_hook", [lambda: None, lambda: None]) 30 | assert len(Developer._settings.pre_hooks["test_hook"]) == 2 31 | assert all(callable(func) for func in Developer._settings.pre_hooks["test_hook"]) 32 | Developer._settings.pre_hooks["test_hook"] = [] 33 | 34 | Developer.register_pre_hooks("test_hook", [lambda: None, "invalid"]) # type: ignore 35 | assert len(Developer._settings.pre_hooks["test_hook"]) == 1 36 | assert all(callable(func) for func in Developer._settings.pre_hooks["test_hook"]) 37 | Developer._settings.pre_hooks["test_hook"] = [] 38 | 39 | Developer.register_pre_hooks("test_hook", lambda: None) 40 | Developer.register_pre_hooks("test_hook", lambda: None, overwrite=True) 41 | assert len(Developer._settings.pre_hooks["test_hook"]) == 1 42 | assert all(callable(func) for func in Developer._settings.pre_hooks["test_hook"]) 43 | Developer._settings.pre_hooks["test_hook"] = [] 44 | 45 | Developer.register_pre_hooks("test_hook", lambda: None) 46 | Developer.register_pre_hooks("test_hook", lambda: None) 47 | assert len(Developer._settings.pre_hooks["test_hook"]) == 2 48 | assert all(callable(func) for func in Developer._settings.pre_hooks["test_hook"]) 49 | 
Developer._settings.pre_hooks["test_hook"] = [] 50 | 51 | 52 | def test_post_hooks(): 53 | Developer.register_post_hooks("test_hook", lambda: None) 54 | assert len(Developer._settings.post_hooks["test_hook"]) == 1 55 | assert all(callable(func) for func in Developer._settings.post_hooks["test_hook"]) 56 | Developer._settings.post_hooks["test_hook"] = [] 57 | 58 | Developer.register_post_hooks("test_hook", [lambda: None, lambda: None]) 59 | assert len(Developer._settings.post_hooks["test_hook"]) == 2 60 | assert all(callable(func) for func in Developer._settings.post_hooks["test_hook"]) 61 | Developer._settings.post_hooks["test_hook"] = [] 62 | 63 | Developer.register_post_hooks("test_hook", [lambda: None, "invalid"]) # type: ignore 64 | assert len(Developer._settings.post_hooks["test_hook"]) == 1 65 | assert all(callable(func) for func in Developer._settings.post_hooks["test_hook"]) 66 | Developer._settings.post_hooks["test_hook"] = [] 67 | 68 | Developer.register_post_hooks("test_hook", lambda: None) 69 | Developer.register_post_hooks("test_hook", lambda: None, overwrite=True) 70 | assert len(Developer._settings.post_hooks["test_hook"]) == 1 71 | assert all(callable(func) for func in Developer._settings.post_hooks["test_hook"]) 72 | Developer._settings.post_hooks["test_hook"] = [] 73 | 74 | Developer.register_post_hooks("test_hook", lambda: None) 75 | Developer.register_post_hooks("test_hook", lambda: None) 76 | assert len(Developer._settings.post_hooks["test_hook"]) == 2 77 | assert all(callable(func) for func in Developer._settings.post_hooks["test_hook"]) 78 | Developer._settings.post_hooks["test_hook"] = [] 79 | 80 | 81 | def test_model_settings(): 82 | class ModelSettingsTest(NodeModel): 83 | pass 84 | 85 | class Settings: 86 | pre_hooks = {"test_hook": [hook_func]} 87 | post_hooks = {"test_hook": [hook_func, hook_func]} 88 | 89 | assert ModelSettingsTest.model_settings().pre_hooks == {"test_hook": [hook_func]} 90 | assert ModelSettingsTest.model_settings().post_hooks == {"test_hook": [hook_func, hook_func]} 91 | 92 | 93 | def test_node_model_modified_properties(): 94 | class ModifiedPropertiesTest(NodeModel): 95 | a: str = "a" 96 | b: int = 1 97 | c: bool = True 98 | 99 | setattr(ModifiedPropertiesTest, "_client", None) 100 | 101 | model = ModifiedPropertiesTest() 102 | model.a = "modified" 103 | assert model.modified_properties == {"a"} 104 | 105 | model.b = 2 106 | assert model.modified_properties == {"a", "b"} 107 | 108 | 109 | def test_relationship_model_modified_properties(): 110 | class ModifiedPropertiesTest(RelationshipModel): 111 | a: str = "a" 112 | b: int = 1 113 | c: bool = True 114 | 115 | setattr(ModifiedPropertiesTest, "_client", None) 116 | 117 | model = ModifiedPropertiesTest() 118 | model.a = "modified" 119 | assert model.modified_properties == {"a"} 120 | 121 | model.b = 2 122 | assert model.modified_properties == {"a", "b"} 123 | 124 | 125 | async def test_hooks_decorator(): 126 | class TestClass: 127 | def __init__(self): 128 | self._client = None # type: ignore 129 | self._settings = BaseModelSettings() 130 | self._settings.pre_hooks["async_test_func"] = [MagicMock(__name__="MagicMock"), AsyncMock(), AsyncMock()] 131 | self._settings.post_hooks["async_test_func"] = [MagicMock(__name__="MagicMock"), AsyncMock(), AsyncMock()] 132 | self._settings.pre_hooks["sync_test_func"] = [ 133 | MagicMock(__name__="MagicMock"), 134 | MagicMock(__name__="MagicMock"), 135 | AsyncMock(), 136 | ] 137 | self._settings.post_hooks["sync_test_func"] = [ 138 | 
MagicMock(__name__="MagicMock"), 139 | MagicMock(__name__="MagicMock"), 140 | AsyncMock(), 141 | ] 142 | 143 | @hooks 144 | async def async_test_func(self): 145 | pass 146 | 147 | @hooks 148 | def sync_test_func(self): 149 | pass 150 | 151 | test_instance = TestClass() 152 | await test_instance.async_test_func() 153 | 154 | for hook_function in test_instance._settings.pre_hooks["async_test_func"]: 155 | cast(Union[MagicMock, AsyncMock], hook_function).assert_called_once_with(test_instance) 156 | 157 | for hook_function in test_instance._settings.post_hooks["async_test_func"]: 158 | cast(Union[MagicMock, AsyncMock], hook_function).assert_called_once_with(test_instance, None) 159 | 160 | test_instance.sync_test_func() 161 | 162 | for hook_function in test_instance._settings.pre_hooks["sync_test_func"]: 163 | cast(Union[MagicMock, AsyncMock], hook_function).assert_called_once_with(test_instance) 164 | 165 | for hook_function in test_instance._settings.post_hooks["sync_test_func"]: 166 | cast(Union[MagicMock, AsyncMock], hook_function).assert_called_once_with(test_instance, None) 167 | 168 | 169 | def test_unregistered_model_exc(): 170 | with pytest.raises(UnregisteredModel): 171 | ModelBase() 172 | 173 | 174 | def test_deflate_non_encodable_list(): 175 | class NonEncodableModel(ModelBase): 176 | list_field: list = [object()] 177 | 178 | setattr(NonEncodableModel, "_client", None) 179 | 180 | with pytest.raises(ListItemNotEncodable): 181 | NonEncodableModel()._deflate({"list_field": [object()]}) 182 | 183 | 184 | def test_node_model_serialization(): 185 | id_ = 1 186 | element_id = "4:08f8a347-1856-487c-8705-26d2b4a69bb7:1" 187 | expected = {"a": "a", "b": 1, "c": True, "id": id_, "element_id": element_id} 188 | 189 | class NodeModelClass(NodeModel): 190 | a: str = "a" 191 | b: int = 1 192 | c: bool = True 193 | 194 | setattr(NodeModelClass, "_client", None) 195 | 196 | node_model = NodeModelClass() 197 | setattr(node_model, "_id", id_) 198 | setattr(node_model, "_element_id", element_id) 199 | 200 | node_model_dict = get_model_dump(node_model) 201 | node_model_json = get_model_dump_json(node_model) 202 | 203 | assert node_model_dict == expected 204 | assert json.loads(node_model_json) == expected 205 | 206 | 207 | def test_relationship_model_serialization(): 208 | id_ = 1 209 | element_id = "4:08f8a347-1856-487c-8705-26d2b4a69bb7:1" 210 | start_node_element_id = "4:08f8a347-1856-487c-8705-26d2b4a69bb7:2" 211 | start_node_id = 2 212 | end_node_element_id = "4:08f8a347-1856-487c-8705-26d2b4a69bb7:3" 213 | end_node_id = 3 214 | expected = { 215 | "a": "a", 216 | "b": 1, 217 | "c": True, 218 | "id": id_, 219 | "element_id": element_id, 220 | "start_node_element_id": start_node_element_id, 221 | "start_node_id": start_node_id, 222 | "end_node_element_id": end_node_element_id, 223 | "end_node_id": end_node_id, 224 | } 225 | 226 | class RelationshipModelClass(RelationshipModel): 227 | a: str = "a" 228 | b: int = 1 229 | c: bool = True 230 | 231 | setattr(RelationshipModelClass, "_client", None) 232 | 233 | relationship_model = RelationshipModelClass() 234 | setattr(relationship_model, "_id", id_) 235 | setattr(relationship_model, "_element_id", element_id) 236 | setattr(relationship_model, "_start_node_element_id", start_node_element_id) 237 | setattr(relationship_model, "_start_node_id", start_node_id) 238 | setattr(relationship_model, "_end_node_element_id", end_node_element_id) 239 | setattr(relationship_model, "_end_node_id", end_node_id) 240 | 241 | relationship_model_dict = 
get_model_dump(relationship_model) 242 | relationship_model_json = get_model_dump_json(relationship_model) 243 | 244 | assert relationship_model_dict == expected 245 | assert json.loads(relationship_model_json) == expected 246 | 247 | 248 | def test_node_model_serialize_exclude(): 249 | id_ = 1 250 | element_id = "4:08f8a347-1856-487c-8705-26d2b4a69bb7:1" 251 | expected_id_excluded = {"a": "a", "b": 1, "c": True, "element_id": element_id} 252 | expected_element_id_excluded = {"a": "a", "b": 1, "c": True, "id": id_} 253 | 254 | class NodeModelClass(NodeModel): 255 | a: str = "a" 256 | b: int = 1 257 | c: bool = True 258 | 259 | setattr(NodeModelClass, "_client", None) 260 | 261 | node_model = NodeModelClass() 262 | setattr(node_model, "_id", id_) 263 | setattr(node_model, "_element_id", element_id) 264 | 265 | node_model_id_excluded_dict = get_model_dump(node_model, exclude={"id"}) 266 | node_model_element_id_excluded_dict = get_model_dump(node_model, exclude={"element_id"}) 267 | node_model_id_excluded_json = get_model_dump_json(node_model, exclude={"id"}) 268 | node_model_element_id_excluded_json = get_model_dump_json(node_model, exclude={"element_id"}) 269 | 270 | assert node_model_id_excluded_dict == expected_id_excluded 271 | assert node_model_element_id_excluded_dict == expected_element_id_excluded 272 | assert json.loads(node_model_id_excluded_json) == expected_id_excluded 273 | assert json.loads(node_model_element_id_excluded_json) == expected_element_id_excluded 274 | 275 | 276 | def test_relationship_model_serialize_exclude(): 277 | id_ = 1 278 | element_id = "4:08f8a347-1856-487c-8705-26d2b4a69bb7:1" 279 | start_node_element_id = "4:08f8a347-1856-487c-8705-26d2b4a69bb7:2" 280 | start_node_id = 2 281 | end_node_element_id = "4:08f8a347-1856-487c-8705-26d2b4a69bb7:3" 282 | end_node_id = 3 283 | 284 | class RelationshipModelClass(RelationshipModel): 285 | a: str = "a" 286 | b: int = 1 287 | c: bool = True 288 | 289 | setattr(RelationshipModelClass, "_client", None) 290 | 291 | relationship_model = RelationshipModelClass() 292 | setattr(relationship_model, "_id", id_) 293 | setattr(relationship_model, "_element_id", element_id) 294 | setattr(relationship_model, "_start_node_element_id", start_node_element_id) 295 | setattr(relationship_model, "_start_node_id", start_node_id) 296 | setattr(relationship_model, "_end_node_element_id", end_node_element_id) 297 | setattr(relationship_model, "_end_node_id", end_node_id) 298 | 299 | assert get_model_dump(relationship_model, exclude={"id"}) == { 300 | "a": "a", 301 | "b": 1, 302 | "c": True, 303 | "element_id": element_id, 304 | "start_node_element_id": start_node_element_id, 305 | "start_node_id": start_node_id, 306 | "end_node_element_id": end_node_element_id, 307 | "end_node_id": end_node_id, 308 | } 309 | assert get_model_dump(relationship_model, exclude={"element_id"}) == { 310 | "a": "a", 311 | "b": 1, 312 | "c": True, 313 | "id": id_, 314 | "start_node_element_id": start_node_element_id, 315 | "start_node_id": start_node_id, 316 | "end_node_element_id": end_node_element_id, 317 | "end_node_id": end_node_id, 318 | } 319 | assert get_model_dump(relationship_model, exclude={"start_node_element_id"}) == { 320 | "a": "a", 321 | "b": 1, 322 | "c": True, 323 | "id": id_, 324 | "element_id": element_id, 325 | "start_node_id": start_node_id, 326 | "end_node_element_id": end_node_element_id, 327 | "end_node_id": end_node_id, 328 | } 329 | assert get_model_dump(relationship_model, exclude={"start_node_id"}) == { 330 | "a": "a", 331 | "b": 1, 332 
| "c": True, 333 | "id": id_, 334 | "element_id": element_id, 335 | "start_node_element_id": start_node_element_id, 336 | "end_node_element_id": end_node_element_id, 337 | "end_node_id": end_node_id, 338 | } 339 | assert get_model_dump(relationship_model, exclude={"end_node_element_id"}) == { 340 | "a": "a", 341 | "b": 1, 342 | "c": True, 343 | "id": id_, 344 | "element_id": element_id, 345 | "start_node_element_id": start_node_element_id, 346 | "start_node_id": start_node_id, 347 | "end_node_id": end_node_id, 348 | } 349 | assert get_model_dump(relationship_model, exclude={"end_node_id"}) == { 350 | "a": "a", 351 | "b": 1, 352 | "c": True, 353 | "id": id_, 354 | "element_id": element_id, 355 | "start_node_element_id": start_node_element_id, 356 | "start_node_id": start_node_id, 357 | "end_node_element_id": end_node_element_id, 358 | } 359 | 360 | assert json.loads(get_model_dump_json(relationship_model, exclude={"id"})) == { 361 | "a": "a", 362 | "b": 1, 363 | "c": True, 364 | "element_id": element_id, 365 | "start_node_element_id": start_node_element_id, 366 | "start_node_id": start_node_id, 367 | "end_node_element_id": end_node_element_id, 368 | "end_node_id": end_node_id, 369 | } 370 | assert json.loads(get_model_dump_json(relationship_model, exclude={"element_id"})) == { 371 | "a": "a", 372 | "b": 1, 373 | "c": True, 374 | "id": id_, 375 | "start_node_element_id": start_node_element_id, 376 | "start_node_id": start_node_id, 377 | "end_node_element_id": end_node_element_id, 378 | "end_node_id": end_node_id, 379 | } 380 | assert json.loads(get_model_dump_json(relationship_model, exclude={"start_node_element_id"})) == { 381 | "a": "a", 382 | "b": 1, 383 | "c": True, 384 | "id": id_, 385 | "element_id": element_id, 386 | "start_node_id": start_node_id, 387 | "end_node_element_id": end_node_element_id, 388 | "end_node_id": end_node_id, 389 | } 390 | assert json.loads(get_model_dump_json(relationship_model, exclude={"start_node_id"})) == { 391 | "a": "a", 392 | "b": 1, 393 | "c": True, 394 | "id": id_, 395 | "element_id": element_id, 396 | "start_node_element_id": start_node_element_id, 397 | "end_node_element_id": end_node_element_id, 398 | "end_node_id": end_node_id, 399 | } 400 | assert json.loads(get_model_dump_json(relationship_model, exclude={"end_node_element_id"})) == { 401 | "a": "a", 402 | "b": 1, 403 | "c": True, 404 | "id": id_, 405 | "element_id": element_id, 406 | "start_node_element_id": start_node_element_id, 407 | "start_node_id": start_node_id, 408 | "end_node_id": end_node_id, 409 | } 410 | assert json.loads(get_model_dump_json(relationship_model, exclude={"end_node_id"})) == { 411 | "a": "a", 412 | "b": 1, 413 | "c": True, 414 | "id": id_, 415 | "element_id": element_id, 416 | "start_node_element_id": start_node_element_id, 417 | "start_node_id": start_node_id, 418 | "end_node_element_id": end_node_element_id, 419 | } 420 | -------------------------------------------------------------------------------- /tests/fields/test_property_options.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=unused-argument, unused-import, redefined-outer-name, protected-access, missing-module-docstring, missing-class-docstring 2 | # pyright: reportGeneralTypeIssues=false 3 | 4 | from pyneo4j_ogm.fields.property_options import WithOptions 5 | 6 | 7 | def test_with_options_returns_subclass_of_provided_type(): 8 | class MyProperty: 9 | pass 10 | 11 | MyPropertyWithOptions = WithOptions(MyProperty) 12 | 13 | assert 
issubclass(MyPropertyWithOptions, MyProperty) 14 | 15 | 16 | def test_with_options_sets_range_index_attribute(): 17 | class MyProperty: 18 | pass 19 | 20 | MyPropertyWithOptions = WithOptions(MyProperty, range_index=True) 21 | 22 | assert getattr(MyPropertyWithOptions, "_range_index") is True 23 | 24 | 25 | def test_with_options_sets_text_index_attribute(): 26 | class MyProperty: 27 | pass 28 | 29 | MyPropertyWithOptions = WithOptions(MyProperty, text_index=True) 30 | 31 | assert getattr(MyPropertyWithOptions, "_text_index") is True 32 | 33 | 34 | def test_with_options_sets_point_index_attribute(): 35 | class MyProperty: 36 | pass 37 | 38 | MyPropertyWithOptions = WithOptions(MyProperty, point_index=True) 39 | 40 | assert getattr(MyPropertyWithOptions, "_point_index") is True 41 | 42 | 43 | def test_with_options_sets_unique_attribute(): 44 | class MyProperty: 45 | pass 46 | 47 | MyPropertyWithOptions = WithOptions(MyProperty, unique=True) 48 | 49 | assert getattr(MyPropertyWithOptions, "_unique") is True 50 | -------------------------------------------------------------------------------- /tests/fields/test_settings.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=unused-argument, unused-import, redefined-outer-name, protected-access, missing-module-docstring, missing-class-docstring 2 | # pyright: reportGeneralTypeIssues=false 3 | 4 | from pyneo4j_ogm.core.node import NodeModel 5 | from pyneo4j_ogm.core.relationship import RelationshipModel 6 | from pyneo4j_ogm.fields.settings import ( 7 | BaseModelSettings, 8 | NodeModelSettings, 9 | RelationshipModelSettings, 10 | _normalize_hooks, 11 | ) 12 | 13 | 14 | def hook_function(): 15 | pass 16 | 17 | 18 | def test_normalize_hooks_validator(): 19 | hooks = { 20 | "hook_list": [hook_function, "not a function"], 21 | "hook_function": hook_function, 22 | "hook_not_callable": "not a function", 23 | } 24 | 25 | normalized_hooks = _normalize_hooks(hooks) 26 | 27 | assert normalized_hooks == { 28 | "hook_list": [hook_function], 29 | "hook_function": [hook_function], 30 | } 31 | 32 | 33 | def test_base_model_settings(): 34 | settings = BaseModelSettings() 35 | 36 | assert not settings.pre_hooks 37 | assert not settings.post_hooks 38 | 39 | 40 | def test_node_model_settings(): 41 | settings = NodeModelSettings() 42 | 43 | assert settings.labels == set() 44 | assert settings.auto_fetch_nodes is None 45 | assert not settings.pre_hooks 46 | assert not settings.post_hooks 47 | 48 | 49 | def test_node_model_settings_inheritance(): 50 | class A(NodeModel): 51 | class Settings: 52 | auto_fetch_nodes = True 53 | post_hooks = { 54 | "save": lambda x: print("post save hook"), 55 | "delete": [lambda x: print("post delete hook"), None], 56 | } 57 | pre_hooks = { 58 | "delete": [lambda x: print("pre delete hook"), lambda x: print("pre delete hook")], 59 | } 60 | labels = {"A"} 61 | 62 | class B(A): 63 | class Settings: 64 | labels = {"B"} 65 | pre_hooks = {"save": lambda x: print("pre save hook B"), "delete": lambda x: print("pre delete hook")} 66 | post_hooks = {"delete": lambda x: print("post delete hook B")} 67 | 68 | assert getattr(A, "_settings", None) is not None 69 | assert isinstance(getattr(A, "_settings", None), NodeModelSettings) 70 | assert A._settings.labels == {"A"} 71 | assert A._settings.auto_fetch_nodes is True 72 | assert "delete" in A._settings.pre_hooks 73 | assert len(A._settings.pre_hooks["delete"]) == 2 74 | assert "save" in A._settings.post_hooks 75 | assert 
len(A._settings.post_hooks["save"]) == 1 76 | assert "delete" in A._settings.post_hooks 77 | assert len(A._settings.post_hooks["delete"]) == 1 78 | 79 | assert getattr(B, "_settings", None) is not None 80 | assert isinstance(getattr(B, "_settings", None), NodeModelSettings) 81 | assert B._settings.labels == {"A", "B"} 82 | assert B._settings.auto_fetch_nodes is True 83 | assert "save" in B._settings.pre_hooks 84 | assert len(B._settings.pre_hooks["save"]) == 1 85 | assert "delete" in B._settings.pre_hooks 86 | assert len(B._settings.pre_hooks["delete"]) == 3 87 | assert "save" in B._settings.post_hooks 88 | assert len(B._settings.post_hooks["save"]) == 1 89 | assert "delete" in B._settings.post_hooks 90 | assert len(B._settings.post_hooks["delete"]) == 2 91 | 92 | 93 | def test_relationship_model_settings(): 94 | settings = RelationshipModelSettings() 95 | 96 | assert not settings.pre_hooks 97 | assert not settings.post_hooks 98 | assert settings.type is None 99 | 100 | 101 | def test_relationship_model_settings_inheritance(): 102 | class A(RelationshipModel): 103 | class Settings: 104 | post_hooks = { 105 | "save": lambda x: print("post save hook"), 106 | "delete": [lambda x: print("post delete hook"), None], 107 | } 108 | pre_hooks = { 109 | "delete": [lambda x: print("pre delete hook"), lambda x: print("pre delete hook")], 110 | } 111 | type = "A" 112 | 113 | class B(A): 114 | class Settings: 115 | type = "B" 116 | pre_hooks = {"save": lambda x: print("pre save hook B"), "delete": lambda x: print("pre delete hook")} 117 | post_hooks = {"delete": lambda x: print("post delete hook B")} 118 | 119 | assert getattr(A, "_settings", None) is not None 120 | assert isinstance(getattr(A, "_settings", None), RelationshipModelSettings) 121 | assert A._settings.type == "A" 122 | assert "delete" in A._settings.pre_hooks 123 | assert len(A._settings.pre_hooks["delete"]) == 2 124 | assert "save" in A._settings.post_hooks 125 | assert len(A._settings.post_hooks["save"]) == 1 126 | assert "delete" in A._settings.post_hooks 127 | assert len(A._settings.post_hooks["delete"]) == 1 128 | 129 | assert getattr(B, "_settings", None) is not None 130 | assert isinstance(getattr(B, "_settings", None), RelationshipModelSettings) 131 | assert B._settings.type == "B" 132 | assert "save" in B._settings.pre_hooks 133 | assert len(B._settings.pre_hooks["save"]) == 1 134 | assert "delete" in B._settings.pre_hooks 135 | assert len(B._settings.pre_hooks["delete"]) == 3 136 | assert "save" in B._settings.post_hooks 137 | assert len(B._settings.post_hooks["save"]) == 1 138 | assert "delete" in B._settings.post_hooks 139 | assert len(B._settings.post_hooks["delete"]) == 2 140 | 141 | 142 | def test_node_model_label_inheritance(): 143 | class A(NodeModel): 144 | class Settings: 145 | labels = {"A"} 146 | 147 | class B(A): 148 | class Settings: 149 | labels = {"B"} 150 | 151 | class C(A): 152 | pass 153 | 154 | class AnotherClass(B): 155 | pass 156 | 157 | class NotInherited(NodeModel): 158 | class Settings: 159 | labels = {"A", "B"} 160 | 161 | assert getattr(A, "_settings", None) is not None 162 | assert isinstance(getattr(A, "_settings", None), NodeModelSettings) 163 | assert A._settings.labels == {"A"} 164 | 165 | assert getattr(B, "_settings", None) is not None 166 | assert isinstance(getattr(B, "_settings", None), NodeModelSettings) 167 | assert B._settings.labels == {"A", "B"} 168 | 169 | assert getattr(C, "_settings", None) is not None 170 | assert isinstance(getattr(C, "_settings", None), NodeModelSettings) 171 | 
assert C._settings.labels == {"A", "C"} 172 | 173 | assert getattr(AnotherClass, "_settings", None) is not None 174 | assert isinstance(getattr(AnotherClass, "_settings", None), NodeModelSettings) 175 | assert AnotherClass._settings.labels == {"A", "B", "AnotherClass"} 176 | 177 | assert getattr(NotInherited, "_settings", None) is not None 178 | assert isinstance(getattr(NotInherited, "_settings", None), NodeModelSettings) 179 | assert NotInherited._settings.labels == {"A", "B"} 180 | -------------------------------------------------------------------------------- /tests/fixtures/db_setup.py: -------------------------------------------------------------------------------- 1 | """ 2 | Fixture for setup/teardown of a Neo4j database for integration tests. 3 | """ 4 | 5 | # pylint: disable=redefined-outer-name, missing-class-docstring 6 | 7 | from typing import Any, Dict, List 8 | 9 | import pytest 10 | from neo4j import AsyncGraphDatabase, AsyncSession 11 | from neo4j.graph import Node 12 | 13 | from pyneo4j_ogm import ( 14 | NodeModel, 15 | Pyneo4jClient, 16 | RelationshipModel, 17 | RelationshipProperty, 18 | RelationshipPropertyCardinality, 19 | RelationshipPropertyDirection, 20 | ) 21 | from pyneo4j_ogm.pydantic_utils import IS_PYDANTIC_V2 22 | 23 | 24 | class Developer(NodeModel): 25 | uid: int 26 | name: str 27 | age: int 28 | 29 | colleagues: RelationshipProperty["Developer", "WorkedWith"] = RelationshipProperty( 30 | target_model="Developer", 31 | relationship_model="WorkedWith", 32 | direction=RelationshipPropertyDirection.OUTGOING, 33 | cardinality=RelationshipPropertyCardinality.ZERO_OR_MORE, 34 | allow_multiple=True, 35 | ) 36 | coffee: RelationshipProperty["Coffee", "Consumed"] = RelationshipProperty( 37 | target_model="Coffee", 38 | relationship_model="Consumed", 39 | direction=RelationshipPropertyDirection.OUTGOING, 40 | cardinality=RelationshipPropertyCardinality.ZERO_OR_MORE, 41 | allow_multiple=False, 42 | ) 43 | 44 | 45 | class Coffee(NodeModel): 46 | flavor: str 47 | sugar: bool 48 | milk: bool 49 | note: Dict[str, Any] 50 | 51 | developers: RelationshipProperty["Developer", "Consumed"] = RelationshipProperty( 52 | target_model="Developer", 53 | relationship_model="Consumed", 54 | direction=RelationshipPropertyDirection.INCOMING, 55 | cardinality=RelationshipPropertyCardinality.ZERO_OR_MORE, 56 | allow_multiple=False, 57 | ) 58 | bestseller_for: RelationshipProperty["CoffeeShop", "Bestseller"] = RelationshipProperty( 59 | target_model="CoffeeShop", 60 | relationship_model="Bestseller", 61 | direction=RelationshipPropertyDirection.OUTGOING, 62 | cardinality=RelationshipPropertyCardinality.ZERO_OR_MORE, 63 | allow_multiple=False, 64 | ) 65 | 66 | class Settings: 67 | labels = {"Beverage", "Hot"} 68 | 69 | 70 | class CoffeeShop(NodeModel): 71 | rating: int 72 | tags: List[str] 73 | 74 | coffees: RelationshipProperty["Coffee", "Sells"] = RelationshipProperty( 75 | target_model="Coffee", 76 | relationship_model="Sells", 77 | direction=RelationshipPropertyDirection.OUTGOING, 78 | cardinality=RelationshipPropertyCardinality.ZERO_OR_MORE, 79 | allow_multiple=False, 80 | ) 81 | bestseller: RelationshipProperty["Coffee", "Bestseller"] = RelationshipProperty( 82 | target_model="Coffee", 83 | relationship_model="Bestseller", 84 | direction=RelationshipPropertyDirection.INCOMING, 85 | cardinality=RelationshipPropertyCardinality.ZERO_OR_ONE, 86 | allow_multiple=False, 87 | ) 88 | 89 | 90 | class WorkedWith(RelationshipModel): 91 | language: str 92 | 93 | class Settings: 94 | type = 
"WAS_WORK_BUDDY_WITH" 95 | 96 | 97 | class Consumed(RelationshipModel): 98 | liked: bool 99 | 100 | class Settings: 101 | type = "LIKES_TO_DRINK" 102 | 103 | 104 | class Sells(RelationshipModel): 105 | pass 106 | 107 | 108 | class Bestseller(RelationshipModel): 109 | class Settings: 110 | type = "BESTSELLER_OF" 111 | 112 | 113 | @pytest.fixture 114 | async def client(): 115 | """ 116 | Create a Pyneo4jClient instance from the package for the test session. 117 | """ 118 | client = await Pyneo4jClient().connect("bolt://localhost:7687", auth=("neo4j", "password")) 119 | 120 | # Drop all nodes, indexes, and constraints from the database. 121 | await client.drop_constraints() 122 | await client.drop_indexes() 123 | await client.drop_nodes() 124 | 125 | yield client 126 | 127 | client.models = set() 128 | await client.close() 129 | 130 | 131 | @pytest.fixture 132 | async def session(): 133 | """ 134 | Create a neo4j driver instance for the test session. 135 | """ 136 | driver = AsyncGraphDatabase.driver(uri="bolt://localhost:7687", auth=("neo4j", "password")) 137 | 138 | async with driver.session() as session: 139 | yield session 140 | 141 | await driver.close() 142 | 143 | 144 | @pytest.fixture 145 | async def setup_test_data(client: Pyneo4jClient, session: AsyncSession): 146 | client.models = set() 147 | await client.register_models([Developer, Coffee, CoffeeShop, WorkedWith, Consumed, Sells, Bestseller]) 148 | await session.run( 149 | """ 150 | CREATE (s1:CoffeeShop {rating: 5, tags: ["cozy", "hipster"]}) 151 | CREATE (s2:CoffeeShop {rating: 1, tags: ["chain"]}) 152 | CREATE (s3:CoffeeShop {rating: 3, tags: ["chain", "hipster"]}) 153 | 154 | CREATE (c1:Beverage:Hot {flavor: "Espresso", sugar: false, milk: false, note: '{\"roast\": \"dark\"}'}) 155 | CREATE (c2:Beverage:Hot {flavor: "Latte", sugar: true, milk: true, note: '{\"roast\": \"medium\"}'}) 156 | CREATE (c3:Beverage:Hot {flavor: "Cappuccino", sugar: true, milk: true, note: '{\"roast\": \"medium\"}'}) 157 | CREATE (c4:Beverage:Hot {flavor: "Americano", sugar: false, milk: false, note: '{\"roast\": \"light\"}'}) 158 | CREATE (c5:Beverage:Hot {flavor: "Mocha", sugar: true, milk: true, note: '{\"roast\": \"dark\"}'}) 159 | 160 | CREATE (d1:Developer {uid: 1, name: "John", age: 30}) 161 | CREATE (d2:Developer {uid: 2, name: "Sam", age: 25}) 162 | CREATE (d3:Developer {uid: 3, name: "Alice", age: 27}) 163 | CREATE (d4:Developer {uid: 4, name: "Bob", age: 32}) 164 | 165 | CREATE (s1)-[:SELLS]->(c1) 166 | CREATE (s1)-[:SELLS]->(c4) 167 | CREATE (s1)<-[:BESTSELLER_OF]-(c4) 168 | 169 | CREATE (s2)-[:SELLS]->(c1) 170 | CREATE (s2)-[:SELLS]->(c3) 171 | CREATE (s2)-[:SELLS]->(c5) 172 | CREATE (s2)<-[:BESTSELLER_OF]-(c3) 173 | 174 | CREATE (s3)-[:SELLS]->(c2) 175 | CREATE (s3)-[:SELLS]->(c5) 176 | CREATE (s3)<-[:BESTSELLER_OF]-(c5) 177 | 178 | CREATE (d1)-[:LIKES_TO_DRINK {liked: True}]->(c1) 179 | CREATE (d1)-[:LIKES_TO_DRINK {liked: False}]->(c2) 180 | CREATE (d2)-[:LIKES_TO_DRINK {liked: True}]->(c3) 181 | CREATE (d3)-[:LIKES_TO_DRINK {liked: True}]->(c4) 182 | CREATE (d3)-[:LIKES_TO_DRINK {liked: False}]->(c5) 183 | CREATE (d3)-[:LIKES_TO_DRINK {liked: False}]->(c1) 184 | 185 | CREATE (d1)-[:WAS_WORK_BUDDY_WITH {language: "Python"}]->(d2) 186 | CREATE (d1)-[:WAS_WORK_BUDDY_WITH {language: "Java"}]->(d2) 187 | CREATE (d1)-[:WAS_WORK_BUDDY_WITH {language: "Python"}]->(d3) 188 | CREATE (d2)-[:WAS_WORK_BUDDY_WITH {language: "Lisp"}]->(d4) 189 | CREATE (d3)-[:WAS_WORK_BUDDY_WITH {language: "Javascript"}]->(d1) 190 | CREATE (d3)-[:WAS_WORK_BUDDY_WITH 
{language: "Javascript"}]->(d4) 191 | CREATE (d4)-[:WAS_WORK_BUDDY_WITH {language: "Go"}]->(d3) 192 | """ 193 | ) 194 | 195 | result = await session.run( 196 | """ 197 | MATCH ()-[r]->() 198 | WITH DISTINCT collect(r) as relationships 199 | MATCH (n) 200 | WITH DISTINCT collect(n) as nodes, relationships 201 | RETURN nodes, relationships 202 | """ 203 | ) 204 | 205 | result_values = await result.values() 206 | await result.consume() 207 | 208 | yield result_values 209 | 210 | client.models = set() 211 | 212 | 213 | @pytest.fixture 214 | def dev_model_instances(setup_test_data): 215 | john: Node = [ 216 | result 217 | for result in setup_test_data[0][0] 218 | if result.labels == Developer.model_settings().labels and result["uid"] == 1 219 | ][0] 220 | sam: Node = [ 221 | result 222 | for result in setup_test_data[0][0] 223 | if result.labels == Developer.model_settings().labels and result["uid"] == 2 224 | ][0] 225 | alice: Node = [ 226 | result 227 | for result in setup_test_data[0][0] 228 | if result.labels == Developer.model_settings().labels and result["uid"] == 3 229 | ][0] 230 | bob: Node = [ 231 | result 232 | for result in setup_test_data[0][0] 233 | if result.labels == Developer.model_settings().labels and result["uid"] == 4 234 | ][0] 235 | 236 | john_model = Developer._inflate(john) 237 | sam_model = Developer._inflate(sam) 238 | alice_model = Developer._inflate(alice) 239 | bob_model = Developer._inflate(bob) 240 | 241 | return john_model, sam_model, alice_model, bob_model 242 | 243 | 244 | @pytest.fixture 245 | def coffee_model_instances(setup_test_data): 246 | latte: Node = [ 247 | result 248 | for result in setup_test_data[0][0] 249 | if result.labels == Coffee.model_settings().labels and result["flavor"] == "Latte" 250 | ][0] 251 | mocha: Node = [ 252 | result 253 | for result in setup_test_data[0][0] 254 | if result.labels == Coffee.model_settings().labels and result["flavor"] == "Mocha" 255 | ][0] 256 | espresso: Node = [ 257 | result 258 | for result in setup_test_data[0][0] 259 | if result.labels == Coffee.model_settings().labels and result["flavor"] == "Espresso" 260 | ][0] 261 | 262 | latte_model = Coffee._inflate(latte) 263 | mocha_model = Coffee._inflate(mocha) 264 | espresso_model = Coffee._inflate(espresso) 265 | 266 | return latte_model, mocha_model, espresso_model 267 | 268 | 269 | @pytest.fixture 270 | def coffee_shop_model_instances(setup_test_data): 271 | rating_five: Node = [ 272 | result 273 | for result in setup_test_data[0][0] 274 | if result.labels == CoffeeShop.model_settings().labels and result["rating"] == 5 275 | ][0] 276 | 277 | rating_five_model = CoffeeShop._inflate(rating_five) 278 | 279 | return (rating_five_model,) 280 | 281 | 282 | if not IS_PYDANTIC_V2: 283 | Developer.update_forward_refs() 284 | Coffee.update_forward_refs() 285 | Consumed.update_forward_refs() 286 | WorkedWith.update_forward_refs() 287 | CoffeeShop.update_forward_refs() 288 | Sells.update_forward_refs() 289 | Bestseller.update_forward_refs() 290 | -------------------------------------------------------------------------------- /tests/fixtures/migrations.py: -------------------------------------------------------------------------------- 1 | """ 2 | Fixture for setup/teardown of unit tests using temporary paths. 
3 | """ 4 | 5 | # pylint: disable=redefined-outer-name, unused-import 6 | 7 | import json 8 | import os 9 | from typing import cast 10 | 11 | import pytest 12 | from neo4j import AsyncSession 13 | from typing_extensions import LiteralString 14 | 15 | from pyneo4j_ogm.migrations.utils.defaults import ( 16 | DEFAULT_CONFIG_FILENAME, 17 | DEFAULT_CONFIG_LABELS, 18 | DEFAULT_CONFIG_URI, 19 | DEFAULT_MIGRATION_DIR, 20 | ) 21 | from tests.fixtures.db_setup import session 22 | 23 | MIGRATION_FILE_NAMES = [ 24 | "20240205190143-mig-one", 25 | "20240205190146-mig-two", 26 | "20240205190149-mig-three", 27 | "20240205190152-mig-four", 28 | "20240205190156-mig-five", 29 | ] 30 | MIGRATION_FILE_NODE_NAMES = ["Bob", "Alice", "Charlie", "David", "Cooper"] 31 | MIGRATION_FILE_TEMPLATE = """ 32 | from pyneo4j_ogm.core.client import Pyneo4jClient 33 | 34 | 35 | async def up(client: Pyneo4jClient) -> None: 36 | ''' 37 | Write your `UP migration` here. 38 | ''' 39 | await client.cypher("CREATE (n:Node {{name: '{name}'}})") 40 | 41 | 42 | async def down(client: Pyneo4jClient) -> None: 43 | ''' 44 | Write your `DOWN migration` here. 45 | ''' 46 | await client.cypher("MATCH (n:Node {{name: '{name}'}}) DETACH DELETE n") 47 | """ 48 | 49 | CUSTOM_MIGRATION_DIR = "my_migrations" 50 | CUSTOM_CONFIG_FILENAME = "config.json" 51 | 52 | LAST_APPLIED = 1707158029.012884 53 | APPLIED_MIGRATIONS = [ 54 | {"name": "20240205190143-mig-one", "applied_at": 1707158029.012881}, 55 | {"name": "20240205190146-mig-two", "applied_at": 1707158029.012884}, 56 | ] 57 | 58 | 59 | def insert_migration_config_files(migration_dir_path, config_file_path): 60 | """ 61 | Inserts the migration directory and config file into the given path. 62 | """ 63 | with open(os.path.join(migration_dir_path, ".gitkeep"), "w", encoding="utf-8") as f: 64 | f.write("") 65 | with open(config_file_path, "w", encoding="utf-8") as f: 66 | f.write( 67 | json.dumps( 68 | { 69 | "migration_dir": migration_dir_path, 70 | "neo4j": { 71 | "uri": DEFAULT_CONFIG_URI, 72 | "node_labels": DEFAULT_CONFIG_LABELS, 73 | "options": {"scheme": "basic", "auth": {"username": "neo4j", "password": "password"}}, 74 | }, 75 | } 76 | ) 77 | ) 78 | 79 | 80 | async def insert_migration_nodes_and_files(session_: AsyncSession, migration_dir_path: str): 81 | await session_.run("MATCH (n) DETACH DELETE n") 82 | await session_.run( 83 | cast( 84 | LiteralString, 85 | f""" 86 | CREATE (m:{':'.join(DEFAULT_CONFIG_LABELS)} {{ 87 | updated_at: $updated_at, 88 | applied_migrations: $applied_migrations 89 | }}) 90 | """, 91 | ), 92 | { 93 | "applied_migrations": [json.dumps(migration) for migration in APPLIED_MIGRATIONS], 94 | "updated_at": LAST_APPLIED, 95 | }, 96 | ) 97 | 98 | for migration_file_name, migration_file_node_name in zip(MIGRATION_FILE_NAMES, MIGRATION_FILE_NODE_NAMES): 99 | with open(os.path.join(migration_dir_path, f"{migration_file_name}.py"), "w", encoding="utf-8") as f: 100 | f.write(MIGRATION_FILE_TEMPLATE.format(name=migration_file_node_name)) 101 | 102 | if migration_file_name in [applied_migration["name"] for applied_migration in APPLIED_MIGRATIONS]: 103 | result = await session_.run( 104 | "CREATE (n:Node {name: $name})", 105 | {"name": migration_file_node_name}, 106 | ) 107 | await result.consume() 108 | 109 | 110 | @pytest.fixture 111 | def tmp_cwd(tmp_path): 112 | """ 113 | Fixture for changing the cwd to the temporary path. 
114 | """ 115 | original_cwd = os.getcwd() 116 | 117 | try: 118 | os.chdir(tmp_path) 119 | yield tmp_path 120 | finally: 121 | os.chdir(original_cwd) 122 | 123 | 124 | @pytest.fixture 125 | def initialized_migration(tmp_path): 126 | """ 127 | Fixture for changing the cwd to the temporary path. 128 | """ 129 | original_cwd = os.getcwd() 130 | migration_dir_path = os.path.join(tmp_path, DEFAULT_MIGRATION_DIR) 131 | config_file_path = os.path.join(tmp_path, DEFAULT_CONFIG_FILENAME) 132 | 133 | try: 134 | os.chdir(tmp_path) 135 | os.mkdir(migration_dir_path) 136 | insert_migration_config_files(migration_dir_path, config_file_path) 137 | 138 | yield tmp_path 139 | finally: 140 | os.chdir(original_cwd) 141 | 142 | 143 | @pytest.fixture 144 | def initialized_migration_with_custom_path(tmp_path): 145 | """ 146 | Fixture for changing the cwd to the temporary path with a custom config path. 147 | """ 148 | original_cwd = os.getcwd() 149 | migration_dir_path = os.path.join(tmp_path, CUSTOM_MIGRATION_DIR) 150 | config_file_path = os.path.join(tmp_path, CUSTOM_CONFIG_FILENAME) 151 | 152 | try: 153 | os.chdir(tmp_path) 154 | os.mkdir(migration_dir_path) 155 | insert_migration_config_files(migration_dir_path, config_file_path) 156 | 157 | yield tmp_path 158 | finally: 159 | os.chdir(original_cwd) 160 | 161 | 162 | @pytest.fixture 163 | async def insert_migrations(initialized_migration, session): 164 | """ 165 | Fixture for inserting migrations into the database. 166 | """ 167 | await insert_migration_nodes_and_files(session, os.path.join(initialized_migration, DEFAULT_MIGRATION_DIR)) 168 | yield initialized_migration 169 | 170 | 171 | @pytest.fixture 172 | async def insert_migrations_with_custom_path(initialized_migration_with_custom_path, session): 173 | """ 174 | Fixture for inserting migrations into the database with a custom config path. 
175 | """ 176 | await insert_migration_nodes_and_files( 177 | session, os.path.join(initialized_migration_with_custom_path, CUSTOM_MIGRATION_DIR) 178 | ) 179 | yield initialized_migration_with_custom_path 180 | -------------------------------------------------------------------------------- /tests/fixtures/models/models_top.py: -------------------------------------------------------------------------------- 1 | from pyneo4j_ogm.core.node import NodeModel 2 | from pyneo4j_ogm.core.relationship import RelationshipModel 3 | 4 | 5 | class ModelOne(NodeModel): 6 | pass 7 | 8 | 9 | class ModelTwo(RelationshipModel): 10 | pass 11 | -------------------------------------------------------------------------------- /tests/fixtures/models/nested/deeply_nested/model_deeply_nested.py: -------------------------------------------------------------------------------- 1 | from pyneo4j_ogm.core.node import NodeModel 2 | from pyneo4j_ogm.core.relationship import RelationshipModel 3 | 4 | 5 | class ModelFive(NodeModel): 6 | pass 7 | 8 | 9 | class ModelSix(RelationshipModel): 10 | pass 11 | -------------------------------------------------------------------------------- /tests/fixtures/models/nested/deeply_nested/other_classes.py: -------------------------------------------------------------------------------- 1 | class NotMePleaseButDeeplyNested: 2 | pass 3 | -------------------------------------------------------------------------------- /tests/fixtures/models/nested/model_nested.py: -------------------------------------------------------------------------------- 1 | from pyneo4j_ogm.core.node import NodeModel 2 | from pyneo4j_ogm.core.relationship import RelationshipModel 3 | 4 | 5 | class ModelThree(NodeModel): 6 | pass 7 | 8 | 9 | class ModelFour(RelationshipModel): 10 | pass 11 | -------------------------------------------------------------------------------- /tests/fixtures/models/nested/no_models.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/groc-prog/pyneo4j-ogm/988b662416d5cace14e7c0d055432a6d5698918e/tests/fixtures/models/nested/no_models.py -------------------------------------------------------------------------------- /tests/fixtures/models/other_classes.py: -------------------------------------------------------------------------------- 1 | class NotMePlease: 2 | pass 3 | -------------------------------------------------------------------------------- /tests/fixtures/operators_builder.py: -------------------------------------------------------------------------------- 1 | """ 2 | Fixture for setup/teardown of unit tests using Operators instance. 3 | """ 4 | import pytest 5 | 6 | from pyneo4j_ogm.queries.operators import Operators 7 | 8 | 9 | @pytest.fixture 10 | def operators_builder(): 11 | """ 12 | Fixture for providing a Operators instance. 13 | """ 14 | builder = Operators() 15 | builder.reset_state() 16 | builder.ref = "n" 17 | 18 | return builder 19 | -------------------------------------------------------------------------------- /tests/fixtures/query_builder.py: -------------------------------------------------------------------------------- 1 | """ 2 | Fixture for setup/teardown of unit tests using QueryBuilder instance. 3 | """ 4 | import pytest 5 | 6 | from pyneo4j_ogm.queries.query_builder import QueryBuilder 7 | 8 | 9 | @pytest.fixture 10 | def query_builder(): 11 | """ 12 | Fixture for providing a QueryBuilder instance. 
13 | """ 14 | builder = QueryBuilder() 15 | builder.reset_query() 16 | builder.parameters = {} 17 | 18 | return builder 19 | -------------------------------------------------------------------------------- /tests/migrations/actions/test_create.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=unused-argument, unused-import, redefined-outer-name, protected-access, missing-module-docstring, missing-class-docstring 2 | # pyright: reportGeneralTypeIssues=false 3 | 4 | import os 5 | 6 | import pytest 7 | 8 | from pyneo4j_ogm.exceptions import MigrationNotInitialized 9 | from pyneo4j_ogm.migrations import create 10 | from pyneo4j_ogm.migrations.actions.create import normalize_filename 11 | from pyneo4j_ogm.migrations.utils.defaults import ( 12 | DEFAULT_MIGRATION_DIR, 13 | MIGRATION_TEMPLATE, 14 | ) 15 | from tests.fixtures.migrations import ( 16 | CUSTOM_CONFIG_FILENAME, 17 | CUSTOM_MIGRATION_DIR, 18 | initialized_migration, 19 | initialized_migration_with_custom_path, 20 | tmp_cwd, 21 | ) 22 | 23 | 24 | def test_create(initialized_migration): 25 | file_name = "testmigration" 26 | assert len(os.listdir(os.path.join(initialized_migration, DEFAULT_MIGRATION_DIR))) == 1 27 | 28 | return_value = create(file_name) 29 | assert "name" in return_value 30 | assert "path" in return_value 31 | assert len(os.listdir(os.path.join(initialized_migration, DEFAULT_MIGRATION_DIR))) == 2 32 | 33 | file = [ 34 | file for file in os.listdir(os.path.join(initialized_migration, DEFAULT_MIGRATION_DIR)) if file.endswith(".py") 35 | ][0] 36 | assert file_name in file 37 | 38 | with open(os.path.join(initialized_migration, DEFAULT_MIGRATION_DIR, file), "r", encoding="utf-8") as f: 39 | assert f.read() == MIGRATION_TEMPLATE.format(name=file) 40 | 41 | 42 | def test_fails_if_not_initialized(tmp_cwd): 43 | with pytest.raises(MigrationNotInitialized): 44 | create("testmigration") 45 | 46 | 47 | def test_with_custom_path(initialized_migration_with_custom_path): 48 | file_name = "testmigration" 49 | assert len(os.listdir(os.path.join(initialized_migration_with_custom_path, CUSTOM_MIGRATION_DIR))) == 1 50 | 51 | return_value = create(file_name, os.path.join(initialized_migration_with_custom_path, CUSTOM_CONFIG_FILENAME)) 52 | assert "name" in return_value 53 | assert "path" in return_value 54 | assert len(os.listdir(os.path.join(initialized_migration_with_custom_path, CUSTOM_MIGRATION_DIR))) == 2 55 | 56 | file = [ 57 | file 58 | for file in os.listdir(os.path.join(initialized_migration_with_custom_path, CUSTOM_MIGRATION_DIR)) 59 | if file.endswith(".py") 60 | ][0] 61 | assert file_name in file 62 | 63 | with open( 64 | os.path.join(initialized_migration_with_custom_path, CUSTOM_MIGRATION_DIR, file), "r", encoding="utf-8" 65 | ) as f: 66 | assert f.read() == MIGRATION_TEMPLATE.format(name=file) 67 | 68 | 69 | def test_normalizes_filename(): 70 | assert normalize_filename("TestMigration") == "test_migration" 71 | assert normalize_filename("testMigration") == "test_migration" 72 | assert normalize_filename("test_migration") == "test_migration" 73 | assert normalize_filename("test-migration") == "test_migration" 74 | -------------------------------------------------------------------------------- /tests/migrations/actions/test_down.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=unused-argument, unused-import, redefined-outer-name, protected-access, missing-module-docstring, missing-class-docstring 2 | # pyright: 
reportGeneralTypeIssues=false 3 | 4 | import json 5 | import os 6 | 7 | import pytest 8 | 9 | from pyneo4j_ogm.exceptions import MigrationNotInitialized 10 | from pyneo4j_ogm.migrations import down 11 | from pyneo4j_ogm.migrations.utils.defaults import DEFAULT_CONFIG_LABELS 12 | from tests.fixtures.db_setup import session 13 | from tests.fixtures.migrations import ( 14 | CUSTOM_CONFIG_FILENAME, 15 | LAST_APPLIED, 16 | MIGRATION_FILE_NAMES, 17 | MIGRATION_FILE_NODE_NAMES, 18 | initialized_migration, 19 | initialized_migration_with_custom_path, 20 | insert_migrations, 21 | insert_migrations_with_custom_path, 22 | tmp_cwd, 23 | ) 24 | 25 | 26 | async def test_down(tmp_cwd, insert_migrations, session): 27 | await down(1) 28 | 29 | result = await session.run(f"MATCH (n:{':'.join(DEFAULT_CONFIG_LABELS)}) RETURN n") 30 | query_results = await result.values() 31 | await result.consume() 32 | 33 | applied_migrations = [json.loads(migration) for migration in query_results[0][0]["applied_migrations"]] 34 | 35 | assert len(query_results) == 1 36 | assert query_results[0][0].labels == set(DEFAULT_CONFIG_LABELS) 37 | assert query_results[0][0]["updated_at"] != LAST_APPLIED 38 | assert len(applied_migrations) == 1 39 | 40 | result = await session.run("MATCH (n:Node) WHERE n.name IN $names RETURN n", {"names": MIGRATION_FILE_NODE_NAMES}) 41 | query_results = await result.values() 42 | await result.consume() 43 | 44 | assert len(query_results) == 1 45 | assert query_results[0][0]["name"] == MIGRATION_FILE_NODE_NAMES[0] 46 | 47 | 48 | async def test_down_count_all(tmp_cwd, insert_migrations, session): 49 | await down("all") 50 | 51 | result = await session.run(f"MATCH (n:{':'.join(DEFAULT_CONFIG_LABELS)}) RETURN n") 52 | query_results = await result.values() 53 | await result.consume() 54 | 55 | applied_migrations = [json.loads(migration) for migration in query_results[0][0]["applied_migrations"]] 56 | 57 | assert len(query_results) == 1 58 | assert query_results[0][0].labels == set(DEFAULT_CONFIG_LABELS) 59 | assert query_results[0][0]["updated_at"] != LAST_APPLIED 60 | assert len(applied_migrations) == 0 61 | 62 | result = await session.run("MATCH (n:Node) WHERE n.name IN $names RETURN n", {"names": MIGRATION_FILE_NODE_NAMES}) 63 | query_results = await result.values() 64 | await result.consume() 65 | 66 | assert len(query_results) == 0 67 | 68 | 69 | async def test_fails_if_not_initialized(tmp_cwd): 70 | with pytest.raises(MigrationNotInitialized): 71 | await down() 72 | 73 | 74 | async def test_with_custom_path(tmp_cwd, insert_migrations_with_custom_path, session): 75 | await down(1, os.path.join(tmp_cwd, CUSTOM_CONFIG_FILENAME)) 76 | 77 | result = await session.run(f"MATCH (n:{':'.join(DEFAULT_CONFIG_LABELS)}) RETURN n") 78 | query_results = await result.values() 79 | await result.consume() 80 | 81 | applied_migrations = [json.loads(migration) for migration in query_results[0][0]["applied_migrations"]] 82 | 83 | assert len(query_results) == 1 84 | assert query_results[0][0].labels == set(DEFAULT_CONFIG_LABELS) 85 | assert query_results[0][0]["updated_at"] != LAST_APPLIED 86 | assert len(applied_migrations) == 1 87 | 88 | result = await session.run("MATCH (n:Node) WHERE n.name IN $names RETURN n", {"names": MIGRATION_FILE_NODE_NAMES}) 89 | query_results = await result.values() 90 | await result.consume() 91 | 92 | assert len(query_results) == 1 93 | assert query_results[0][0]["name"] == MIGRATION_FILE_NODE_NAMES[0] 94 | -------------------------------------------------------------------------------- 
/tests/migrations/actions/test_init.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=unused-argument, unused-import, redefined-outer-name, protected-access, missing-module-docstring, missing-class-docstring 2 | # pyright: reportGeneralTypeIssues=false 3 | 4 | import json 5 | import os 6 | 7 | import pytest 8 | from pydantic import ValidationError 9 | 10 | from pyneo4j_ogm.migrations import init 11 | from pyneo4j_ogm.migrations.utils.defaults import ( 12 | DEFAULT_CONFIG_FILENAME, 13 | DEFAULT_CONFIG_LABELS, 14 | DEFAULT_CONFIG_URI, 15 | DEFAULT_MIGRATION_DIR, 16 | ) 17 | from tests.fixtures.migrations import tmp_cwd 18 | 19 | 20 | def test_init(tmp_cwd): 21 | migration_dir_path = os.path.join(tmp_cwd, DEFAULT_MIGRATION_DIR) 22 | config_file_path = os.path.join(tmp_cwd, DEFAULT_CONFIG_FILENAME) 23 | 24 | init() 25 | 26 | assert os.path.isdir(migration_dir_path) 27 | assert os.path.isfile(os.path.join(migration_dir_path, ".gitkeep")) 28 | assert os.path.isfile(config_file_path) 29 | 30 | with open(config_file_path, "r", encoding="utf-8") as f: 31 | config = json.load(f) 32 | 33 | assert "migration_dir" in config 34 | assert config["migration_dir"] == DEFAULT_MIGRATION_DIR 35 | assert "neo4j" in config 36 | assert "uri" in config["neo4j"] 37 | assert config["neo4j"]["uri"] == DEFAULT_CONFIG_URI 38 | assert "node_labels" in config["neo4j"] 39 | assert config["neo4j"]["node_labels"] == DEFAULT_CONFIG_LABELS 40 | assert "options" in config["neo4j"] 41 | assert config["neo4j"]["options"] == {} 42 | 43 | 44 | def test_existing_migration_dir(tmp_cwd): 45 | migration_dir_path = os.path.join(tmp_cwd, DEFAULT_MIGRATION_DIR) 46 | config_file_path = os.path.join(tmp_cwd, DEFAULT_CONFIG_FILENAME) 47 | os.mkdir(migration_dir_path) 48 | 49 | init() 50 | 51 | with open(config_file_path, "r", encoding="utf-8") as f: 52 | config = json.load(f) 53 | 54 | assert "migration_dir" in config 55 | assert config["migration_dir"] == DEFAULT_MIGRATION_DIR 56 | assert "neo4j" in config 57 | assert "uri" in config["neo4j"] 58 | assert config["neo4j"]["uri"] == DEFAULT_CONFIG_URI 59 | assert "node_labels" in config["neo4j"] 60 | assert config["neo4j"]["node_labels"] == DEFAULT_CONFIG_LABELS 61 | assert "options" in config["neo4j"] 62 | assert config["neo4j"]["options"] == {} 63 | 64 | 65 | def test_existing_config_file(tmp_cwd): 66 | custom_migration_dir_path = "foo/bar" 67 | config_file_path = os.path.join(tmp_cwd, DEFAULT_CONFIG_FILENAME) 68 | 69 | with open(config_file_path, "w", encoding="utf-8") as f: 70 | f.write( 71 | json.dumps( 72 | { 73 | "migration_dir": custom_migration_dir_path, 74 | "neo4j": {"uri": DEFAULT_CONFIG_URI, "node_labels": DEFAULT_CONFIG_LABELS, "options": {}}, 75 | } 76 | ) 77 | ) 78 | 79 | init() 80 | 81 | migration_dir_path = os.path.join(tmp_cwd, custom_migration_dir_path) 82 | assert os.path.isdir(migration_dir_path) 83 | assert os.path.isfile(os.path.join(migration_dir_path, ".gitkeep")) 84 | assert os.path.isfile(config_file_path) 85 | 86 | 87 | def test_invalid_existing_config(tmp_cwd): 88 | config_file_path = os.path.join(tmp_cwd, DEFAULT_CONFIG_FILENAME) 89 | 90 | with open(config_file_path, "w", encoding="utf-8") as f: 91 | f.write( 92 | json.dumps( 93 | { 94 | "migration_dir": DEFAULT_MIGRATION_DIR, 95 | "neo4j": {"node_labels": DEFAULT_CONFIG_LABELS, "options": {}}, 96 | } 97 | ) 98 | ) 99 | 100 | with pytest.raises(ValidationError): 101 | init() 102 | 
-------------------------------------------------------------------------------- /tests/migrations/actions/test_status.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=unused-argument, unused-import, redefined-outer-name, protected-access, missing-module-docstring, missing-class-docstring 2 | # pyright: reportGeneralTypeIssues=false 3 | 4 | import json 5 | import os 6 | 7 | import pytest 8 | 9 | from pyneo4j_ogm.exceptions import MigrationNotInitialized 10 | from pyneo4j_ogm.migrations import status 11 | from pyneo4j_ogm.migrations.utils.defaults import DEFAULT_CONFIG_LABELS 12 | from tests.fixtures.db_setup import session 13 | from tests.fixtures.migrations import ( 14 | APPLIED_MIGRATIONS, 15 | CUSTOM_CONFIG_FILENAME, 16 | LAST_APPLIED, 17 | MIGRATION_FILE_NAMES, 18 | MIGRATION_FILE_NODE_NAMES, 19 | initialized_migration, 20 | initialized_migration_with_custom_path, 21 | insert_migrations, 22 | insert_migrations_with_custom_path, 23 | tmp_cwd, 24 | ) 25 | 26 | 27 | async def test_status(tmp_cwd, insert_migrations, session): 28 | result = await status() 29 | 30 | assert len(result) == 5 31 | assert result[0] == { 32 | "name": APPLIED_MIGRATIONS[0]["name"], 33 | "applied_at": APPLIED_MIGRATIONS[0]["applied_at"], 34 | "status": "APPLIED", 35 | } 36 | assert result[1] == { 37 | "name": APPLIED_MIGRATIONS[1]["name"], 38 | "applied_at": APPLIED_MIGRATIONS[1]["applied_at"], 39 | "status": "APPLIED", 40 | } 41 | assert result[2] == {"name": MIGRATION_FILE_NAMES[2], "applied_at": None, "status": "PENDING"} 42 | assert result[3] == {"name": MIGRATION_FILE_NAMES[3], "applied_at": None, "status": "PENDING"} 43 | assert result[4] == {"name": MIGRATION_FILE_NAMES[4], "applied_at": None, "status": "PENDING"} 44 | 45 | 46 | async def test_fails_if_not_initialized(tmp_cwd): 47 | with pytest.raises(MigrationNotInitialized): 48 | await status() 49 | 50 | 51 | async def test_with_custom_path(tmp_cwd, insert_migrations_with_custom_path, session): 52 | result = await status(os.path.join(tmp_cwd, CUSTOM_CONFIG_FILENAME)) 53 | 54 | assert len(result) == 5 55 | assert result[0] == { 56 | "name": APPLIED_MIGRATIONS[0]["name"], 57 | "applied_at": APPLIED_MIGRATIONS[0]["applied_at"], 58 | "status": "APPLIED", 59 | } 60 | assert result[1] == { 61 | "name": APPLIED_MIGRATIONS[1]["name"], 62 | "applied_at": APPLIED_MIGRATIONS[1]["applied_at"], 63 | "status": "APPLIED", 64 | } 65 | assert result[2] == {"name": MIGRATION_FILE_NAMES[2], "applied_at": None, "status": "PENDING"} 66 | assert result[3] == {"name": MIGRATION_FILE_NAMES[3], "applied_at": None, "status": "PENDING"} 67 | assert result[4] == {"name": MIGRATION_FILE_NAMES[4], "applied_at": None, "status": "PENDING"} 68 | -------------------------------------------------------------------------------- /tests/migrations/actions/test_up.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=unused-argument, unused-import, redefined-outer-name, protected-access, missing-module-docstring, missing-class-docstring 2 | # pyright: reportGeneralTypeIssues=false 3 | 4 | import json 5 | import os 6 | 7 | import pytest 8 | 9 | from pyneo4j_ogm.exceptions import MigrationNotInitialized 10 | from pyneo4j_ogm.migrations import up 11 | from pyneo4j_ogm.migrations.utils.defaults import DEFAULT_CONFIG_LABELS 12 | from tests.fixtures.db_setup import session 13 | from tests.fixtures.migrations import ( 14 | CUSTOM_CONFIG_FILENAME, 15 | LAST_APPLIED, 16 | MIGRATION_FILE_NAMES, 
17 | MIGRATION_FILE_NODE_NAMES, 18 | initialized_migration, 19 | initialized_migration_with_custom_path, 20 | insert_migrations, 21 | insert_migrations_with_custom_path, 22 | tmp_cwd, 23 | ) 24 | 25 | 26 | async def test_up(tmp_cwd, insert_migrations, session): 27 | await up(1) 28 | 29 | result = await session.run(f"MATCH (n:{':'.join(DEFAULT_CONFIG_LABELS)}) RETURN n") 30 | query_results = await result.values() 31 | await result.consume() 32 | 33 | applied_migrations = [json.loads(migration) for migration in query_results[0][0]["applied_migrations"]] 34 | 35 | assert len(query_results) == 1 36 | assert query_results[0][0].labels == set(DEFAULT_CONFIG_LABELS) 37 | assert query_results[0][0]["updated_at"] != LAST_APPLIED 38 | assert len(applied_migrations) == 3 39 | assert applied_migrations[1]["name"] == MIGRATION_FILE_NAMES[1] 40 | 41 | result = await session.run("MATCH (n:Node) WHERE n.name IN $names RETURN n", {"names": MIGRATION_FILE_NODE_NAMES}) 42 | query_results = await result.values() 43 | await result.consume() 44 | 45 | assert len(query_results) == 3 46 | assert query_results[0][0]["name"] in MIGRATION_FILE_NODE_NAMES[:-2] 47 | assert query_results[1][0]["name"] in MIGRATION_FILE_NODE_NAMES[:-2] 48 | assert query_results[2][0]["name"] in MIGRATION_FILE_NODE_NAMES[:-2] 49 | 50 | 51 | async def test_up_count_all(tmp_cwd, insert_migrations, session): 52 | await up("all") 53 | 54 | result = await session.run(f"MATCH (n:{':'.join(DEFAULT_CONFIG_LABELS)}) RETURN n") 55 | query_results = await result.values() 56 | await result.consume() 57 | 58 | applied_migrations = [json.loads(migration) for migration in query_results[0][0]["applied_migrations"]] 59 | 60 | assert len(query_results) == 1 61 | assert query_results[0][0].labels == set(DEFAULT_CONFIG_LABELS) 62 | assert query_results[0][0]["updated_at"] != LAST_APPLIED 63 | assert len(applied_migrations) == 5 64 | 65 | for index, migration_name in enumerate(MIGRATION_FILE_NAMES): 66 | assert applied_migrations[index]["name"] == migration_name 67 | 68 | result = await session.run("MATCH (n:Node) WHERE n.name IN $names RETURN n", {"names": MIGRATION_FILE_NODE_NAMES}) 69 | query_results = await result.values() 70 | await result.consume() 71 | 72 | assert len(query_results) == 5 73 | 74 | 75 | async def test_fails_if_not_initialized(tmp_cwd): 76 | with pytest.raises(MigrationNotInitialized): 77 | await up() 78 | 79 | 80 | async def test_with_custom_path(tmp_cwd, insert_migrations_with_custom_path, session): 81 | await up(1, os.path.join(tmp_cwd, CUSTOM_CONFIG_FILENAME)) 82 | 83 | result = await session.run(f"MATCH (n:{':'.join(DEFAULT_CONFIG_LABELS)}) RETURN n") 84 | query_results = await result.values() 85 | await result.consume() 86 | 87 | applied_migrations = [json.loads(migration) for migration in query_results[0][0]["applied_migrations"]] 88 | 89 | assert len(query_results) == 1 90 | assert query_results[0][0].labels == set(DEFAULT_CONFIG_LABELS) 91 | assert query_results[0][0]["updated_at"] != LAST_APPLIED 92 | assert len(applied_migrations) == 3 93 | assert applied_migrations[1]["name"] == MIGRATION_FILE_NAMES[1] 94 | 95 | result = await session.run("MATCH (n:Node) WHERE n.name IN $names RETURN n", {"names": MIGRATION_FILE_NODE_NAMES}) 96 | query_results = await result.values() 97 | await result.consume() 98 | 99 | assert len(query_results) == 3 100 | assert query_results[0][0]["name"] in MIGRATION_FILE_NODE_NAMES[:-2] 101 | assert query_results[1][0]["name"] in MIGRATION_FILE_NODE_NAMES[:-2] 102 | assert query_results[2][0]["name"] in 
MIGRATION_FILE_NODE_NAMES[:-2] 103 | -------------------------------------------------------------------------------- /tests/queries/test_validators.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=missing-module-docstring, missing-class-docstring 2 | 3 | from pydantic import BaseModel 4 | from pydantic.class_validators import root_validator 5 | 6 | from pyneo4j_ogm.pydantic_utils import IS_PYDANTIC_V2, parse_model 7 | from pyneo4j_ogm.queries.validators import ( 8 | QueryOptionModel, 9 | _normalize_fields, 10 | _normalize_labels, 11 | ) 12 | 13 | if IS_PYDANTIC_V2: 14 | from pydantic import model_validator 15 | 16 | 17 | def test_normalize_fields_validator(): 18 | class TestModel(BaseModel): 19 | attr: str 20 | 21 | if IS_PYDANTIC_V2: 22 | normalize_and_validate_fields = model_validator(mode="after")(_normalize_fields) 23 | else: 24 | normalize_and_validate_fields = root_validator(allow_reuse=True)(_normalize_fields) 25 | 26 | if IS_PYDANTIC_V2: 27 | model_config = { 28 | "extra": "allow", 29 | "populate_by_name": True, 30 | } 31 | else: 32 | 33 | class Config: 34 | extra = "allow" 35 | 36 | test_model = parse_model( 37 | TestModel, 38 | { 39 | "attr": "bar", 40 | "invalid_field": "value", 41 | "valid_field": {"$eq": "value"}, 42 | "invalid_operator": {"$invalid": "value"}, 43 | }, 44 | ) 45 | 46 | assert hasattr(test_model, "attr") 47 | assert hasattr(test_model, "valid_field") 48 | assert not hasattr(test_model, "invalid_field") 49 | assert not hasattr(test_model, "invalid_operator") 50 | 51 | 52 | def test_normalize_labels_validator(): 53 | class TestModel(BaseModel): 54 | pass 55 | 56 | assert _normalize_labels(TestModel, None) is None 57 | assert _normalize_labels(TestModel, "label") == ["label"] 58 | assert _normalize_labels(TestModel, ["label1", "label2"]) == ["label1", "label2"] 59 | 60 | 61 | def test_query_options_sort(): 62 | options = QueryOptionModel(sort="name") 63 | assert options.sort == ["name"] 64 | 65 | options = QueryOptionModel(sort=["name", "age"]) 66 | assert options.sort == ["name", "age"] 67 | -------------------------------------------------------------------------------- /tests/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/groc-prog/pyneo4j-ogm/988b662416d5cace14e7c0d055432a6d5698918e/tests/utils/__init__.py -------------------------------------------------------------------------------- /tests/utils/string_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Utility functions for asserting string tests. 3 | """ 4 | from typing import Optional 5 | 6 | 7 | def assert_string_equality(actual: Optional[str], expected: str) -> None: 8 | """ 9 | Assert that two strings are equal after removing all whitespace. 10 | 11 | Args: 12 | actual (str | None): The actual string. 13 | expected (str): The expected string. 14 | 15 | Raises: 16 | AssertionError: If the actual string is None or the strings are not equal. 17 | """ 18 | if actual is None: 19 | raise AssertionError("The actual string is None.") 20 | 21 | normalized_actual = "".join(actual.split()) 22 | normalized_expected = "".join(expected.split()) 23 | 24 | assert normalized_actual == normalized_expected 25 | --------------------------------------------------------------------------------
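For illustration, a minimal usage sketch of assert_string_equality from tests/utils/string_utils.py; the Cypher strings below are hypothetical and not taken from the test suite:

from tests.utils.string_utils import assert_string_equality

# All whitespace is stripped from both strings before comparison, so a
# pretty-printed query can be checked against a compact expected string.
assert_string_equality(
    """
    MATCH (n:Developer)
    WHERE n.age >= $age
    RETURN n
    """,
    "MATCH (n:Developer) WHERE n.age >= $age RETURN n",
)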