├── pydango ├── py.typed ├── connection │ ├── __init__.py │ ├── consts.py │ ├── exceptions.py │ ├── types.py │ ├── client.py │ ├── utils.py │ ├── query_utils.py │ ├── session.py │ └── graph_utils.py ├── orm │ ├── consts.py │ ├── models │ │ ├── sentinel.py │ │ ├── shapes.py │ │ ├── __init__.py │ │ ├── utils.py │ │ ├── types.py │ │ ├── relations.py │ │ ├── edge.py │ │ ├── fields.py │ │ └── vertex.py │ ├── __init__.py │ ├── utils.py │ └── encoders.py ├── query │ ├── types.py │ ├── consts.py │ ├── utils.py │ ├── __init__.py │ ├── options.py │ └── functions.py ├── __init__.py ├── utils.py └── indexes.py ├── tests ├── __init__.py ├── session │ ├── __init__.py │ ├── conftest.py │ ├── test_cities.py │ ├── test_social_network.py │ └── test_family.py ├── test_queries │ ├── __init__.py │ ├── utils.py │ ├── conftest.py │ ├── test_queries_integration.py │ ├── test_ecommerce.py │ ├── data.py │ └── ecommerce_queries.py ├── test_bench.py ├── test_errors.py ├── conftest.py └── queries.py ├── .tool-versions ├── stubs └── aioarango │ ├── __init__.pyi │ └── client.pyi ├── docs ├── installation.md ├── session.md ├── orm │ ├── query.md │ ├── vertex.md │ ├── edge.md │ ├── base.md │ ├── vertex2.md │ └── collection.md ├── query │ ├── options.md │ ├── functions.md │ ├── usage.md │ ├── query.md │ ├── operations.md │ └── expressions.md ├── index.md └── quickstart.md ├── LICENSE ├── CHANGELOG.md ├── .run └── pytest for tests.session.test_social_network.run.xml ├── mkdocs.yml ├── pyproject.toml ├── .gitignore ├── .pre-commit-config.yaml ├── README.md └── .github └── workflows └── ci.yml /pydango/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/session/__init__.py: 
async def execute_query(query, database):
    """Compile *query* and run it against *database*'s AQL endpoint.

    Returns whatever cursor/result the driver's ``aql.execute`` yields.
    """
    compiled = query.compile()
    return await database.aql.execute(compiled, bind_vars=query.bind_vars)
class LazyFetch:
    """Sentinel stored on a relationship field that has not been fetched yet.

    Holds the session and the owning instance so the relation can be resolved
    lazily later.

    Fix: the previous ``@dataclasses.dataclass`` decorator declared no fields,
    so (per the dataclasses docs) the hand-written ``__init__`` was kept but a
    field-less ``__eq__``/``__repr__`` were generated — making every two
    ``LazyFetch`` instances compare equal and repr without state. A sentinel
    must compare by identity, so the decorator is removed.
    """

    def __init__(self, session, instance):
        # NOTE(review): parameter order is (session, instance) — keep callers in sync.
        self.instance = instance
        self.session = session

    def __repr__(self):
        return f"{type(self).__name__}(session={self.session!r}, instance={self.instance!r})"
def get_globals(cls):
    """Return a copy of the globals of the module *cls* was defined in, or {}."""
    module = sys.modules.get(cls.__module__)
    return dict(module.__dict__) if module is not None else {}


@lru_cache
def evaluate_forward_ref(source, model, **localns):
    """Resolve the forward reference *model* against *source*'s module globals.

    Results are memoized, so repeated resolution of the same reference is cheap.
    """
    return evaluate_forwardref(model, get_globals(source), localns)
def save_dict(model: "ArangoModel"):
    """Serialize *model* by delegating to its own ``save_dict`` hook."""
    return model.save_dict()


def convert_edge_data_to_valid_kwargs(edge_dict):
    """Rewrite *edge_dict* in place, turning ``ModelFieldExpression`` keys
    into their plain field-name strings so the dict can be used as kwargs."""
    for key in list(edge_dict):
        if isinstance(key, ModelFieldExpression):
            edge_dict[key.field] = edge_dict.pop(key)
ForwardRef], list[str]] 15 | -------------------------------------------------------------------------------- /pydango/query/consts.py: -------------------------------------------------------------------------------- 1 | from typing import Final 2 | 3 | KEY: Final[str] = "_key" 4 | """The key identifier used in the query.""" 5 | 6 | ID: Final[str] = "_id" 7 | """The ID identifier used in the query.""" 8 | 9 | REV: Final[str] = "_rev" 10 | """The revision identifier used in the query.""" 11 | 12 | FROM: Final[str] = "_from" 13 | """The 'from' field used in query relations.""" 14 | 15 | TO: Final[str] = "_to" 16 | """The 'to' field used in query relations.""" 17 | 18 | DYNAMIC_ALIAS: Final[str] = "DynamicAlias" 19 | """Alias used for dynamic query operations.""" 20 | -------------------------------------------------------------------------------- /pydango/__init__.py: -------------------------------------------------------------------------------- 1 | from .connection.session import PydangoSession 2 | from .orm import ( 3 | EdgeCollectionConfig, 4 | EdgeModel, 5 | ORMQuery, 6 | Relation, 7 | VertexCollectionConfig, 8 | VertexModel, 9 | ) 10 | from .query import AQLQuery 11 | from .query.operations import TraversalDirection 12 | 13 | __version__ = "0.3.0" 14 | __all__ = [ 15 | "PydangoSession", 16 | "VertexModel", 17 | "EdgeModel", 18 | "EdgeCollectionConfig", 19 | "VertexCollectionConfig", 20 | "ORMQuery", 21 | "AQLQuery", 22 | "Relation", 23 | "TraversalDirection", 24 | ] 25 | -------------------------------------------------------------------------------- /pydango/query/utils.py: -------------------------------------------------------------------------------- 1 | from typing import Protocol, TypeVar, Union 2 | 3 | from pydango.query.consts import FROM, ID, KEY, REV, TO 4 | from pydango.query.expressions import NEW 5 | 6 | 7 | class Compilable(Protocol): 8 | def compile(self, *args, **kwargs) -> Union[str, None]: ... 
def new(*, edge=False, debug=True) -> Union[dict[str, str], NEW]:
    """Build a projection of the AQL ``NEW`` pseudo-variable.

    With ``debug`` true (the default) the raw ``NEW`` expression is returned;
    otherwise a dict projecting the system fields ``_id``/``_key``/``_rev``,
    plus ``_from``/``_to`` when ``edge`` is true.
    """
    pseudo = NEW()
    if debug:
        return pseudo
    projection = {field: pseudo[field] for field in (ID, KEY, REV)}
    if edge:
        projection[FROM] = pseudo[FROM]
        projection[TO] = pseudo[TO]
    return projection
"TraversalDirection", 28 | ] 29 | -------------------------------------------------------------------------------- /pydango/utils.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from typing import TYPE_CHECKING, Type, cast 3 | 4 | from aioarango.database import StandardDatabase 5 | 6 | from pydango.connection.exceptions import SessionNotInitializedError 7 | from pydango.connection.utils import get_or_create_collection 8 | 9 | if TYPE_CHECKING: 10 | from pydango import PydangoSession 11 | from pydango.orm.models.base import ArangoModel 12 | 13 | 14 | async def init_model(model: type["ArangoModel"], session: "PydangoSession"): 15 | if not session.initialized: 16 | raise SessionNotInitializedError() 17 | 18 | collection = await get_or_create_collection(cast(StandardDatabase, session.database), model) 19 | await session.create_indexes(collection, model) 20 | 21 | 22 | async def init_models(session: "PydangoSession", *models: Type["ArangoModel"]): 23 | if not session.initialized: 24 | raise SessionNotInitializedError() 25 | await asyncio.gather(*[init_model(coll, session) for coll in models]) 26 | -------------------------------------------------------------------------------- /tests/test_queries/conftest.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | 3 | import pytest_asyncio 4 | from aioarango.database import Database 5 | 6 | from pydango.query.expressions import NEW 7 | from tests.queries import insert_return_new_query 8 | from tests.test_queries.data import DATA 9 | 10 | 11 | @pytest_asyncio.fixture(scope="package", autouse=True) 12 | async def populate(database: Database): 13 | responses = defaultdict(list) 14 | for coll in DATA: 15 | await database.delete_collection(coll, ignore_missing=True) 16 | await database.create_collection(coll) 17 | for coll in DATA: 18 | for i, row in enumerate(DATA[coll]): 19 | aql, _, __ = 
def test_invalid_for_expression():
    """A bare string passed to ``for_`` must be rejected."""
    with pytest.raises(AssertionError):
        # for_ asserts its argument is a CollectionExpression or
        # IteratorExpression, so a plain string trips an AssertionError
        AQLQuery().for_("users")


def test_duplicate_iterator_alias():
    """Two FOR clauses must not reuse the same iterator alias."""
    coll1 = CollectionExpression("users", "u")
    coll2 = CollectionExpression("posts", "u")
    with pytest.raises(ValueError):
        AQLQuery().for_(coll1).for_(
            coll2
        )  # ValueError: iterator alias "u" is already bound by the first FOR clause
8 | 9 | ### Initialization: 10 | 11 | The class can be instantiated with: 12 | 13 | - a pre-configured StandardDatabase 14 | - by providing details like client, database, username, and password. 15 | 16 | !!! tip 17 | Before using the session, ensure it's initialized by calling the initialize() method. 18 | 19 | ### Methods: 20 | 21 | - **`initialize`**: Set up the session. Mandatory before performing database operations. 22 | - **`create_indexes`**: Define and set up indexes for your models. 23 | - **`save`**: Persist a document. The strategy parameter dictates the save behavior, whether to update 24 | existing or insert new. 25 | - **`get`**: Fetch a document based on its model type and ID. 26 | - **`execute`**: Directly run AQL queries. 27 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 nadobando 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
class Relationship:
    """Describes a link between two ORM models, optionally routed via an edge model."""

    def __init__(
        self,
        *,
        field: "ModelField",
        back_populates: Optional[str] = None,
        link_model: Type["TVertexModel"],
        via_model: Optional[Type["TEdge"]] = None,
        link_type: "LinkTypes",
    ):
        self.field = field
        self.back_populates = back_populates
        self.link_model = link_model
        self.via_model = via_model
        self.link_type = link_type

    def __repr_args__(self) -> "ReprArgs":
        """Pydantic-style repr args: link model (name, or the raw ForwardRef if
        still unresolved), link type value, and the via model name when set."""
        if isinstance(self.link_model, ForwardRef):
            shown = self.link_model
        else:
            shown = self.link_model.__name__
        args = [("link_model", shown), ("link_type", self.link_type.value)]
        if self.via_model:
            args.append(("via_model", self.via_model.__name__))
        return args
5 | It integrates with various parts of the ORM and aids in constructing and executing queries. 6 | 7 | #### Key Features 8 | 9 | - Automatic binding 10 | - `AQL` injection protection 11 | - query building 12 | 13 | ## **`ORMQuery`** 14 | 15 | The `ORMQuery` class is a subclass of `AQLQuery`. 16 | It provides a Pythonic API for constructing queries for ArangoDB. 17 | 18 | ## builder helpers 19 | 20 | ### `for_()` 21 | 22 | the `for_()` method is used to specify the target vertex/edge collection of the query. 23 | 24 | ```python 25 | from pydango.orm import for_ 26 | 27 | 28 | for_(User).filter(User.name == "John Doe").return_(User) 29 | ``` 30 | 31 | ### `traverse()` 32 | 33 | ```python 34 | from pydango.orm import traverse 35 | from pydango.query.expressions import IteratorExpression 36 | from pydango.query import TraversalDirection 37 | 38 | edge = IteratorExpression() 39 | traverse( 40 | (User, edge), 41 | edges={"friends"}, 42 | start="people/1", 43 | depth=(0, 1), 44 | direction=TraversalDirection.OUTBOUND, 45 | ).filter(User.name == "John Doe").return_(User) 46 | ``` 47 | -------------------------------------------------------------------------------- /docs/orm/vertex.md: -------------------------------------------------------------------------------- 1 | ## **`VertexModel`** 2 | 3 | Metaclass: **`VertexMeta`** 4 | 5 | Subclasses: **`BaseArangoModel`** 6 | 7 | ### **Overview** 8 | 9 | The `VertexModel` class represents a vertex in the context of the `pydango` ORM. It provides essential attributes, 10 | methods, and configurations for defining and working with vertices in ArangoDB. 11 | 12 | ### **Attributes** 13 | 14 | - **`id`**: An optional unique identifier for the ArangoDB vertex. 15 | - **`key`**: An optional unique key specific to ArangoDB vertices. 16 | - **`rev`**: An optional revision attribute used in ArangoDB for versioning and conflict resolution. 17 | - **`edges`**: Represents the edges related to this vertex. 
Allows for dot-notation access to related edges. 18 | - **`Config`**: Inherits from `BaseConfig`, providing Pydantic model-specific configurations. 19 | - **`Collection`**: Inherits from `VertexCollectionConfig`, offering vertex-specific collection configurations. 20 | 21 | ## **Collection** 22 | 23 | ### **`VertexCollectionConfig`** 24 | 25 | Subclasses: 26 | 27 | - **`CollectionConfig`** 28 | 29 | ### **Overview** 30 | 31 | The `VertexCollectionConfig` class provides specific configurations tailored for vertex collections in ArangoDB. It 32 | extends the base `CollectionConfig` with vertex-centric customizations. 33 | -------------------------------------------------------------------------------- /pydango/connection/types.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from collections import defaultdict, namedtuple 3 | from enum import Enum 4 | from typing import Type, Union 5 | 6 | from indexed import IndexedOrderedDict 7 | 8 | from pydango.orm import EdgeModel, VertexModel 9 | from pydango.orm.models import BaseArangoModel 10 | from pydango.query.options import UpsertOptions 11 | 12 | if sys.version_info >= (3, 10): 13 | from typing import TypeAlias 14 | else: 15 | from typing_extensions import TypeAlias 16 | 17 | CollectionUpsertOptions: TypeAlias = dict[Union[str, Type["BaseArangoModel"]], UpsertOptions] 18 | ModelFieldMapping: TypeAlias = dict[int, defaultdict[str, list[tuple[int, int]]]] 19 | VerticesIdsMapping: TypeAlias = dict[Type[VertexModel], dict[int, int]] 20 | EdgesIdsMapping: TypeAlias = dict[Type[EdgeModel], dict[int, dict[int, int]]] 21 | 22 | EdgeCollectionsMapping: TypeAlias = dict[Type[EdgeModel], IndexedOrderedDict[list[EdgeModel]]] 23 | EdgeVerticesIndexMapping = dict[ 24 | Type[EdgeModel], dict[int, dict[tuple[Type[VertexModel], Type[VertexModel]], list[int]]] 25 | ] 26 | 27 | VertexCollectionsMapping = dict[Type[VertexModel], IndexedOrderedDict[BaseArangoModel]] 28 | 29 | 
# One relation to persist: its edge collection, the vertex field it hangs off,
# the related model and the via (edge) model.
# NOTE(review): field semantics inferred from the names — confirm against the
# session code that consumes RelationGroup.
RelationGroup = namedtuple("RelationGroup", ["collection", "field", "model", "via_model"])


class UpdateStrategy(str, Enum):
    """How an existing document is written back when saving."""

    UPDATE = "update"  # partial write: merge changed fields into the stored doc
    REPLACE = "replace"  # full write: overwrite the stored doc entirely
______________________________________________________________________

This documentation offers a developer-centric guide to the `BaseArangoModel` class. It is designed to help developers
understand and use the class effectively.
32 | -------------------------------------------------------------------------------- /stubs/aioarango/client.pyi: -------------------------------------------------------------------------------- 1 | from aioarango.connection import ( 2 | BasicConnection as BasicConnection, 3 | Connection as Connection, 4 | JwtConnection as JwtConnection, 5 | JwtSuperuserConnection as JwtSuperuserConnection, 6 | ) 7 | from aioarango.database import StandardDatabase as StandardDatabase 8 | from aioarango.exceptions import ServerConnectionError as ServerConnectionError 9 | from aioarango.http import DefaultHTTPClient as DefaultHTTPClient, HTTPClient as HTTPClient 10 | from aioarango.resolver import ( 11 | HostResolver as HostResolver, 12 | RandomHostResolver as RandomHostResolver, 13 | RoundRobinHostResolver as RoundRobinHostResolver, 14 | SingleHostResolver as SingleHostResolver, 15 | ) 16 | from typing import Any, Callable, Optional, Sequence, Union 17 | 18 | class ArangoClient: 19 | def __init__( 20 | self, 21 | hosts: Union[str, Sequence[str]] = ..., 22 | host_resolver: str = ..., 23 | http_client: Optional[HTTPClient] = ..., 24 | serializer: Callable[..., str] = ..., 25 | deserializer: Callable[[str], Any] = ..., 26 | ) -> None: ... 27 | async def close(self) -> None: ... 28 | @property 29 | def hosts(self) -> Sequence[str]: ... 30 | @property 31 | def version(self): ... 32 | async def db( 33 | self, 34 | name: str = ..., 35 | username: str = ..., 36 | password: str = ..., 37 | verify: bool = ..., 38 | auth_method: str = ..., 39 | superuser_token: Optional[str] = ..., 40 | ) -> StandardDatabase: ... 
41 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # CHANGELOG 2 | 3 | 4 | 5 | ## v0.3.0 (2024-07-20) 6 | 7 | ### Feature 8 | 9 | * feat: allow pydantic v2 usage (#15) ([`62d50a9`](https://github.com/nadobando/pydangorm/commit/62d50a96f65480b5b22101ac7a0d954b89b8253d)) 10 | 11 | 12 | ## v0.2.1 (2023-10-14) 13 | 14 | ### Fix 15 | 16 | * fix: improve PydangoSession __init__ method (#14) 17 | 18 | * fix: improve PydangoSession __init__ method 19 | * improve docs ([`ea7ac0d`](https://github.com/nadobando/pydangorm/commit/ea7ac0d00bbc60641daa3e8c0c99d6d7d096505b)) 20 | 21 | 22 | ## v0.2.0 (2023-10-14) 23 | 24 | ### Chore 25 | 26 | * chore: fix pyproject.toml (#5) ([`e9063ff`](https://github.com/nadobando/pydangorm/commit/e9063ff034e4042d778b992aff2c55191d416fa9)) 27 | 28 | ### Feature 29 | 30 | * feat: support collection config kwargs some session utils and refactor ([`adf7f1a`](https://github.com/nadobando/pydangorm/commit/adf7f1a700b7537eaf7714e9d19abd331836be5f)) 31 | 32 | ### Fix 33 | 34 | * fix: mini refactor of files ([`0e711c7`](https://github.com/nadobando/pydangorm/commit/0e711c715614e6c4f0fb39951ae9969a79c3f72c)) 35 | 36 | * fix: moved query execution to session ([`6957559`](https://github.com/nadobando/pydangorm/commit/6957559a25ef5bb75a3b66dbd45403987a3a633f)) 37 | 38 | 39 | ## v0.1.0 (2023-09-25) 40 | 41 | ### Build 42 | 43 | * build: add py.typed (#3) ([`eb642eb`](https://github.com/nadobando/pydangorm/commit/eb642ebba7cba67b0fc37352776f2f23cedb6330)) 44 | 45 | ### Feature 46 | 47 | * feat: save and fetch models (#1) ([`8935e4f`](https://github.com/nadobando/pydangorm/commit/8935e4f8ec5c39f300cb7f818000fe8563e21989)) 48 | -------------------------------------------------------------------------------- /.run/pytest for tests.session.test_social_network.run.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 29 | 30 | -------------------------------------------------------------------------------- /pydango/orm/models/edge.py: -------------------------------------------------------------------------------- 1 | from abc import ABC 2 | from typing import TYPE_CHECKING, Generic, Optional, TypeVar, Union 3 | 4 | from pydantic.v1 import BaseModel, Field 5 | 6 | from pydango.orm.encoders import jsonable_encoder 7 | from pydango.orm.models import BaseArangoModel, CollectionConfig, CollectionType 8 | from pydango.query.consts import FROM, TO 9 | 10 | if TYPE_CHECKING: 11 | from pydantic.v1.typing import DictStrAny 12 | 13 | TEdge = TypeVar("TEdge", bound="EdgeModel") 14 | 15 | 16 | class EdgeCollectionConfig(CollectionConfig): 17 | type = CollectionType.EDGE 18 | 19 | 20 | class EdgeModel(BaseArangoModel, ABC): 21 | from_: Optional[str] = Field(None, alias=FROM) 22 | to: Optional[Union[str]] = Field(None, alias=TO) 23 | 24 | class Collection(EdgeCollectionConfig): 25 | pass 26 | 27 | def save_dict(self) -> "DictStrAny": 28 | exclude: set[Union[int, str]] = set() 29 | for key in ["from_", "to"]: 30 | if self.__getattribute__(key) is None: 31 | exclude.add(key) 32 | return jsonable_encoder(self, by_alias=True, exclude=exclude) 33 | # return self.dict(by_alias=True, exclude=exclude) 34 | 35 | 36 | T = TypeVar("T", bound=BaseModel) 37 | 38 | 39 | class EdgeData(BaseModel, ABC, Generic[T]): 40 | pass 41 | 42 | 43 | class EdgeDict(dict): 44 | def __getattr__(self, item): 45 | try: 46 | return self[item] 47 | except KeyError: 48 | raise AttributeError(f"'{type(self).__name__}' object has no attribute '{item}'") 49 | 50 | def __setattr__(self, key, value): 51 | self[key] = value 52 | 53 | def __delattr__(self, item): 54 | try: 55 | del self[item] 56 | except KeyError: 57 | raise AttributeError(f"'{type(self).__name__}' object has no attribute '{item}'") 58 | 
-------------------------------------------------------------------------------- /pydango/indexes.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from dataclasses import dataclass 3 | from typing import TYPE_CHECKING, Optional, Sequence, Union 4 | 5 | if sys.version_info >= (3, 10): 6 | from typing import TypeAlias 7 | else: 8 | from typing_extensions import TypeAlias 9 | 10 | 11 | if TYPE_CHECKING: 12 | from aioarango.typings import Fields 13 | 14 | 15 | @dataclass() 16 | class Index: ... 17 | 18 | 19 | @dataclass() 20 | class GeoIndex(Index): 21 | fields: "Fields" 22 | ordered: Optional[bool] = None 23 | name: Optional[str] = None 24 | in_background: Optional[bool] = None 25 | 26 | 27 | @dataclass 28 | class HashIndex(Index): 29 | fields: Sequence[str] 30 | unique: Optional[bool] = None 31 | sparse: Optional[bool] = None 32 | deduplicate: Optional[bool] = None 33 | name: Optional[str] = None 34 | in_background: Optional[bool] = None 35 | 36 | 37 | @dataclass 38 | class SkipListIndex(Index): 39 | fields: Sequence[str] 40 | unique: Optional[bool] = None 41 | sparse: Optional[bool] = None 42 | deduplicate: Optional[bool] = None 43 | name: Optional[str] = None 44 | in_background: Optional[bool] = None 45 | 46 | 47 | @dataclass 48 | class FullTextIndex(Index): 49 | fields: Sequence[str] 50 | min_length: Optional[int] = None 51 | name: Optional[str] = None 52 | in_background: Optional[bool] = None 53 | 54 | 55 | @dataclass 56 | class PersistentIndex(Index): 57 | fields: Sequence[str] 58 | unique: Optional[bool] = None 59 | sparse: Optional[bool] = None 60 | name: Optional[str] = None 61 | in_background: Optional[bool] = None 62 | 63 | 64 | @dataclass 65 | class TTLIndex(Index): 66 | fields: Sequence[str] 67 | expiry_time: int 68 | name: Optional[str] = None 69 | in_background: Optional[bool] = None 70 | 71 | 72 | Indexes: TypeAlias = Union[GeoIndex, HashIndex, SkipListIndex, FullTextIndex, PersistentIndex, 
TTLIndex] 73 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | --- 2 | site_name: pydangORM 3 | theme: 4 | name: material 5 | features: 6 | - content.code.copy 7 | icon: 8 | repo: fontawesome/brands/github 9 | admonition: 10 | note: octicons/tag-16 11 | abstract: octicons/checklist-16 12 | info: octicons/info-16 13 | tip: octicons/squirrel-16 14 | success: octicons/check-16 15 | question: octicons/question-16 16 | warning: octicons/alert-16 17 | failure: octicons/x-circle-16 18 | danger: octicons/zap-16 19 | bug: octicons/bug-16 20 | example: octicons/beaker-16 21 | quote: octicons/quote-16 22 | nav: 23 | - Home: index.md 24 | - Installation: installation.md 25 | - Tutorial: quickstart.md 26 | - Documentation: 27 | - Vertex: orm/vertex.md 28 | - Edge: orm/edge.md 29 | - Collection Config: orm/collection.md 30 | - Query: orm/query.md 31 | - Session: session.md 32 | - API: 33 | - ORM: 34 | - Base: orm/base.md 35 | 36 | # - Vertex: api/orm/vertex.md 37 | # - Edge: api/orm/edge.md 38 | # - Collection Config: api/orm/collection.md 39 | # - Query: api/orm/query.md 40 | # - Session: api/session.md 41 | - Query Core (Low Level): 42 | - Usage: query/usage.md 43 | - Expressions: query/expressions.md 44 | - Functions: query/functions.md 45 | - Options: query/options.md 46 | - Operations: query/operations.md 47 | - Query: query/query.md 48 | 49 | repo_url: https://github.com/nadobando/pydangorm 50 | repo_name: nadobando/pydangorm 51 | 52 | markdown_extensions: 53 | - admonition 54 | - attr_list 55 | - md_in_html 56 | - def_list 57 | 58 | - pymdownx.highlight: 59 | anchor_linenums: true 60 | line_spans: __span 61 | pygments_lang_class: true 62 | - pymdownx.inlinehilite 63 | - pymdownx.snippets 64 | - pymdownx.superfences 65 | - pymdownx.critic 66 | - pymdownx.caret 67 | - pymdownx.keys 68 | - pymdownx.mark 69 | - pymdownx.tilde 70 | - 
pymdownx.details 71 | 72 | - pymdownx.tasklist: 73 | custom_checkbox: true 74 | -------------------------------------------------------------------------------- /pydango/connection/client.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Sequence, Union 2 | 3 | from aioarango import ArangoClient, HTTPClient 4 | from aioarango.connection import ( 5 | BasicConnection, 6 | Connection, 7 | JwtConnection, 8 | JwtSuperuserConnection, 9 | ) 10 | 11 | 12 | def make_connection( # nosec: B107 13 | hosts: Union[str, Sequence[str]] = "http://127.0.0.1:8529", 14 | host_resolver: str = "roundrobin", 15 | http_client: Optional[HTTPClient] = None, 16 | db_name: str = "_system", 17 | username: str = "root", 18 | password: str = "", 19 | auth_method: str = "basic", 20 | superuser_token: Optional[str] = None, 21 | ) -> Connection: 22 | client = ArangoClient(hosts, host_resolver, http_client) 23 | 24 | if superuser_token is not None: 25 | return JwtSuperuserConnection( 26 | hosts=client.hosts, 27 | host_resolver=client._host_resolver, 28 | sessions=client._sessions, 29 | db_name=db_name, 30 | http_client=client._http, 31 | serializer=client._serializer, 32 | deserializer=client._deserializer, 33 | superuser_token=superuser_token, 34 | ) 35 | 36 | elif auth_method.lower() == "basic": 37 | return BasicConnection( 38 | hosts=client.hosts, 39 | host_resolver=client._host_resolver, 40 | sessions=client._sessions, 41 | db_name=db_name, 42 | username=username, 43 | password=password, 44 | http_client=client._http, 45 | serializer=client._serializer, 46 | deserializer=client._deserializer, 47 | ) 48 | elif auth_method.lower() == "jwt": 49 | return JwtConnection( 50 | hosts=client.hosts, 51 | host_resolver=client._host_resolver, 52 | sessions=client._sessions, 53 | db_name=db_name, 54 | username=username, 55 | password=password, 56 | http_client=client._http, 57 | serializer=client._serializer, 58 | 
deserializer=client._deserializer, 59 | ) 60 | 61 | else: 62 | raise ValueError(f"invalid auth_method: {auth_method}") 63 | -------------------------------------------------------------------------------- /tests/test_queries/test_queries_integration.py: -------------------------------------------------------------------------------- 1 | # Example data for populating the database 2 | from typing import Sequence 3 | 4 | import pytest 5 | from aioarango.database import Database 6 | 7 | from pydango.connection.utils import deplete_cursor, iterate_cursor 8 | from tests.queries import multiple_filters_query, projection_complex_query, simple_query 9 | from tests.test_queries.data import DATA 10 | from tests.test_queries.utils import execute_query 11 | 12 | 13 | @pytest.mark.asyncio 14 | async def test_simple_query(database: Database): 15 | query, _ = simple_query(29) 16 | 17 | results = await deplete_cursor(await execute_query(query, database)) 18 | expected = get_at(DATA["users"], 2, 6, 4, 3) 19 | 20 | assert results == expected 21 | 22 | 23 | def get_at(lst: Sequence, *indexes: int): 24 | return [lst.__getitem__(i) for i in indexes] 25 | 26 | 27 | @pytest.mark.asyncio 28 | async def test_multiple_filters_query(database: Database): 29 | query, _ = multiple_filters_query(25, "Female") 30 | results = await deplete_cursor(await execute_query(query, database)) 31 | expected = get_at(DATA["users"], 1, 4, 6) 32 | assert expected == results 33 | 34 | 35 | @pytest.mark.asyncio 36 | async def test_projection_complex_query(database: Database): 37 | query, _, _ = projection_complex_query("Jane Smith", 25) 38 | results = await iterate_cursor(await execute_query(query, database)) 39 | expected_results = [ 40 | {"a": "Jane Smith"}, 41 | ] 42 | assert expected_results == results 43 | 44 | 45 | # @pytest.mark.asyncio 46 | # async def test_sort_filter_query(database: Database): 47 | # query, _, _ = sort_filter_query(28) 48 | # query.sep="\n" 49 | # query.compile() 50 | # print() 51 | # 
print(query) 52 | # results = await deplete_cursor(await query.execute(database)) 53 | # expected_results = get_at(DATA["users"], 1, 2, 3) 54 | # assert results == expected_results 55 | 56 | 57 | # @pytest.mark.asyncio 58 | # async def test_multiple_collections_query(database: Database): 59 | # query, _, _ = multiple_collections_query(25) 60 | # results = await deplete_cursor(await query.execute(database)) 61 | # assert results == DATA["users"][0:2] 62 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | import sys 4 | from typing import AsyncGenerator, TypeVar 5 | 6 | import pytest 7 | from aioarango import ArangoClient 8 | from aioarango.database import StandardDatabase 9 | 10 | from pydango.connection.utils import get_or_create_db 11 | 12 | 13 | @pytest.fixture(scope="session", autouse=True) 14 | def event_loop(): 15 | try: 16 | loop = asyncio.get_running_loop() 17 | except RuntimeError: 18 | loop = asyncio.new_event_loop() 19 | yield loop 20 | loop.close() 21 | 22 | 23 | exclude = { 24 | "name", 25 | "msg", 26 | "args", 27 | "levelname", 28 | "levelno", 29 | "pathname", 30 | "filename", 31 | "module", 32 | "exc_info", 33 | "exc_text", 34 | "stack_info", 35 | "lineno", 36 | "funcName", 37 | "created", 38 | "msecs", 39 | "relativeCreated", 40 | "thread", 41 | "threadName", 42 | "processName", 43 | "process", 44 | "message", 45 | } 46 | 47 | 48 | @pytest.fixture(autouse=True) 49 | def add_log(caplog): 50 | class CustomFormatter(logging.Formatter): 51 | def format(self, record): 52 | formatted_record = record.getMessage() 53 | 54 | for i in record.__dict__: 55 | if i not in exclude: 56 | formatted_record += f"\n{i}=\n{record.__dict__[i]}" 57 | 58 | return formatted_record 59 | 60 | formatter = CustomFormatter() 61 | handler = logging.StreamHandler(stream=sys.stdout) 62 | 
handler.setFormatter(formatter) 63 | logging.getLogger("pydango").addHandler(handler) 64 | with caplog.at_level(logging.DEBUG, "pydango"): 65 | yield 66 | 67 | 68 | T = TypeVar("T") 69 | 70 | AsyncFixture = AsyncGenerator[T, None] 71 | 72 | 73 | @pytest.fixture(scope="session") 74 | async def client() -> AsyncFixture[ArangoClient]: 75 | client = ArangoClient() 76 | yield client 77 | await client.close() 78 | 79 | 80 | @pytest.fixture(scope="session") 81 | async def database(client: ArangoClient) -> AsyncFixture[StandardDatabase]: 82 | # await (await client.db("_system")).delete_database("pydango") 83 | # exit() 84 | 85 | db = await get_or_create_db(client, "pydango") 86 | yield db 87 | # await (await client.db("_system")).delete_database("pydango") 88 | -------------------------------------------------------------------------------- /docs/query/options.md: -------------------------------------------------------------------------------- 1 | # **Options** 2 | 3 | A Base class representing general AQL options. 4 | Some AQL Operations have options that can be passed to them. to configure how the operation is performed. 5 | 6 | ## **LoopOptions** 7 | 8 | Represents options specific to loops in AQL. The options include: 9 | 10 | - **`index_hint`**: A hint for which index to use. 11 | - **`force_index_hint`**: Whether to force the use of the index hint. 12 | - **`disable_index`**: If set, disables the use of indexes. 13 | - **`max_projections`**: Maximum number of projections. 14 | - **`use_cache`**: Indicates if caching should be used. 15 | - **`look_ahead`**: The number of lookahead operations. 16 | 17 | ## **ModificationOptions** 18 | 19 | ### BaseModificationOptions 20 | 21 | An abstract base class representing options for modification operations in AQL. 22 | The options include: 23 | 24 | - **`ignore_errors`**: Whether to ignore errors. 25 | - **`wait_for_sync`**: If set, waits for synchronization. 26 | - **`ignore_revs`**: Ignores revisions. 
27 | - **`exclusive`**: Not fully described in the current snippet. 28 | - **`refill_index_caches`**: Refills index caches if set. 29 | 30 | ### **RemoveOptions** 31 | 32 | Subclasses: 33 | 34 | - **`BaseModificationOptions`** 35 | 36 | Represents options for the remove operation in AQL. 37 | 38 | ### **UpdateOptions** 39 | 40 | Subclasses: 41 | 42 | - **`BaseModificationOptions`** 43 | 44 | The options include: 45 | 46 | - **`keep_null`**: If set, retains null values. 47 | - **`merge_objects`**: If set, merges objects. 48 | - **`refill_index_caches`**: Refills index caches if set. 49 | 50 | ### **ReplaceOptions** 51 | 52 | Subclasses: 53 | 54 | - **`BaseModificationOptions`** 55 | 56 | Represents options for the replace operation in AQL. 57 | 58 | ### **UpsertOption** 59 | 60 | Subclasses: 61 | 62 | - **`BaseModificationOptions`** 63 | 64 | The options include: 65 | 66 | - **`index_hint`**: hint for which index to use. 67 | 68 | ## **CollectOptions** 69 | 70 | Represents options specific to the COLLECT operation in AQL. 71 | The options include: 72 | 73 | - **`method`**: Specifies the method used for the COLLECT operation **`CollectMethod`** 74 | 75 | ### CollectMethod 76 | 77 | An enumeration representing the method used for the COLLECT operation in AQL. 
78 | The values include: 79 | 80 | - **`SORTED`** 81 | - **`HASHED`** 82 | -------------------------------------------------------------------------------- /pydango/orm/models/fields.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING, Any, Dict, Optional, Type, Union, cast 2 | 3 | from pydantic.v1.fields import ModelField 4 | 5 | from pydango.orm.models.sentinel import LazyFetch 6 | from pydango.query.expressions import ( 7 | Expression, 8 | FieldExpression, 9 | IteratorExpression, 10 | VariableExpression, 11 | ) 12 | 13 | if TYPE_CHECKING: 14 | from pydantic.v1.fields import LocStr, ModelOrDc, ValidateReturn 15 | 16 | from pydango.orm.models.vertex import TVertexModel 17 | from pydango.query.expressions import QueryExpression 18 | 19 | 20 | class ModelFieldExpression(FieldExpression): 21 | def __init__(self, field: Union[str, Expression], parent: Type["TVertexModel"]): 22 | super().__init__(field, cast(VariableExpression, parent)) 23 | self.parent = parent # type: ignore[assignment] 24 | 25 | def compile(self, query_ref: "QueryExpression") -> str: 26 | if isinstance(self.field, Expression): 27 | return super().compile(query_ref) 28 | else: 29 | if not isinstance(self.parent, IteratorExpression): 30 | # currently importing ORMQuery creates a circular dependency 31 | compiled = query_ref.orm_bound_vars[self.parent] # type: ignore[attr-defined] 32 | return f"{compiled.compile(query_ref)}.{self.field}" 33 | return super().compile(query_ref) 34 | 35 | def __hash__(self): 36 | return hash(self.field) 37 | 38 | 39 | class RelationModelField(ModelField): 40 | def validate( 41 | self, 42 | v: Any, 43 | values: Dict[str, Any], 44 | *, 45 | loc: "LocStr", 46 | cls: Optional["ModelOrDc"] = None, 47 | ) -> "ValidateReturn": 48 | return super().validate(v, values, loc=loc, cls=cls) if not isinstance(v, LazyFetch) else (v, None) 49 | 50 | 51 | def get_pydango_field(field: ModelField, cls: 
Type[RelationModelField] = RelationModelField) -> RelationModelField: 52 | return cls( 53 | name=field.name, 54 | type_=field.annotation, 55 | alias=field.alias, 56 | class_validators=field.class_validators, 57 | default=field.default, 58 | default_factory=field.default_factory, 59 | required=field.required, 60 | model_config=field.model_config, 61 | final=field.final, 62 | field_info=field.field_info, 63 | ) 64 | -------------------------------------------------------------------------------- /pydango/connection/utils.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING, Awaitable, Optional, Type, Union, cast, overload 2 | 3 | import aioarango 4 | 5 | if TYPE_CHECKING: 6 | from aioarango import ArangoClient 7 | from aioarango.collection import StandardCollection 8 | from aioarango.database import StandardDatabase 9 | 10 | from pydango.orm.models.base import ArangoModel 11 | 12 | 13 | @overload 14 | async def get_or_create_collection( 15 | db: "StandardDatabase", model: Type["ArangoModel"], *, edge=None 16 | ) -> "StandardCollection": ... 17 | 18 | 19 | @overload 20 | async def get_or_create_collection(db: "StandardDatabase", model: str, *, edge=None) -> "StandardCollection": ... 
21 | 22 | 23 | async def get_or_create_collection( 24 | db: "StandardDatabase", model: Union[str, Type["ArangoModel"]], *, edge: Optional[bool] = None 25 | ) -> "StandardCollection": 26 | if isinstance(model, str): 27 | collection_name = model 28 | edge = edge or False 29 | elif collection := getattr(model, "Collection", None): 30 | collection_name = collection.name 31 | if edge is None: 32 | edge = True if collection.type.value == collection.type.EDGE else False 33 | else: 34 | raise AssertionError() 35 | 36 | if not await db.has_collection(collection_name): 37 | try: 38 | return await cast(Awaitable["StandardCollection"], db.create_collection(collection_name, edge=edge)) 39 | except aioarango.exceptions.CollectionCreateError as e: 40 | if e.error_code != 1207: 41 | raise e 42 | 43 | return db.collection(collection_name) 44 | 45 | 46 | async def get_or_create_db( 47 | client: "ArangoClient", db: str, user: str = "root", password: str = "", auth_method: str = "basic", **create_params 48 | ) -> "StandardDatabase": 49 | sys_db = await client.db("_system", username=user, password=password) 50 | 51 | if not await sys_db.has_database(db): 52 | await sys_db.create_database(db, **create_params) 53 | 54 | return await client.db(db, username=user, password=password, auth_method=auth_method) 55 | 56 | 57 | async def deplete_cursor(cursor): 58 | result = [] 59 | while cursor.has_more(): # Fetch until nothing is left on the server. 60 | await cursor.fetch() 61 | while not cursor.empty(): # Pop until nothing is left on the cursor. 
62 | result.append(cursor.pop()) 63 | return result 64 | 65 | 66 | async def iterate_cursor(cursor): 67 | return [doc async for doc in cursor] 68 | -------------------------------------------------------------------------------- /docs/query/functions.md: -------------------------------------------------------------------------------- 1 | # **Functions** 2 | 3 | ## **Abstract Functions** 4 | 5 | ### **`BaseFunctionExpression`** 6 | 7 | This is an abstract base class that represents a generic AQL function. 8 | handles the compilation of the function into its AQL representation. 9 | Takes a function name and a list of arguments. 10 | 11 | Converts dictionaries and lists to their respective ObjectExpression and ListExpression representations. 12 | 13 | ### **`FunctionExpression`** 14 | 15 | Subclasses: 16 | 17 | - **`BaseFunctionExpression`** 18 | - **`ReturnableExpression`** 19 | 20 | It represents an AQL function that can be used/returned in a query. 21 | It enforces that a valid function name is provided. 22 | 23 | ### **`ReturnsArray`** 24 | 25 | An abstract base class that mark functions that return arrays. 26 | 27 | ## **Document Functions** 28 | 29 | ### **`Document`** 30 | 31 | Represent the DOCUMENT AQL function, which retrieves a document by its ID. 32 | 33 | ### **`Unset`** 34 | 35 | Represent the UNSET AQL function. 36 | 37 | ```AQL 38 | UNSET(document, attributeName1, attributeName2, ... attributeNameN) 39 | ``` 40 | 41 | Remove the attributes `attributeName1` to `attributeNameN` from document. 42 | All other attributes will be preserved. 43 | 44 | ### **`Merge`** 45 | 46 | Represents the MERGE AQL function. 47 | 48 | ```AQL 49 | MERGE(document1, document2, ... documentN) 50 | ``` 51 | 52 | Merge the documents `document1` to `documentN` into a single document. 53 | 54 | ### **`Has`** 55 | 56 | Represents the HAS AQL function. 
57 | 58 | ```AQL 59 | HAS(document, attributeName) 60 | ``` 61 | 62 | This function checks if an attribute exists in a given document. 63 | 64 | ## **List/Array Functions** 65 | 66 | ### **`Length`** 67 | 68 | Represents the LENGTH AQL function. 69 | 70 | ``` 71 | LENGTH(anyArray) 72 | ``` 73 | 74 | It has some additional functionality that allows arithmetic operations like addition and subtraction on the result. 75 | 76 | ### **`Append`** 77 | 78 | Represents the APPEND AQL function. This function appends a value to an array. 79 | 80 | ### **`Concat`** 81 | 82 | Represents the CONCAT AQL function. It concatenates multiple arrays into one. 83 | 84 | ### **`Count`** 85 | 86 | Represents the COUNT AQL function. It counts the number of items in an array. 87 | 88 | ## **Numeric Functions** 89 | 90 | ### **`Sum`** 91 | 92 | Represents the **`SUM`** AQL function. 93 | 94 | ## **String Functions** 95 | 96 | ### **`RegExMatch`** 97 | 98 | Represents the REGEX_MATCHES AQL function. This function matches a string against a regular expression pattern. 99 | 100 | ## **Misc Functions** 101 | 102 | ### **`CollectionsExpression`** 103 | 104 | ```AQL 105 | COLLECTIONS() 106 | ``` 107 | 108 | Represents the COLLECTIONS AQL function. This function likely retrieves information about all collections. 109 | -------------------------------------------------------------------------------- /docs/query/usage.md: -------------------------------------------------------------------------------- 1 | ## **AQLQuery** 2 | 3 | ### 1. 
Simple Query to Fetch Data 4 | 5 | ```python 6 | from pydango.query import AQLQuery 7 | 8 | # Constructing a query to fetch all users from the "users" collection 9 | query = AQLQuery().for_("user", "users").return_("user") 10 | 11 | # Preparing and printing the query 12 | prepared = query.prepare() 13 | print(prepared.query) 14 | ``` 15 | 16 | Equivalent to: 17 | 18 | ```python 19 | from pydango.query import AQLQuery 20 | from pydango.query.expressions import IteratorExpression, CollectionExpression 21 | 22 | # Constructing a query to fetch all users from the "users" collection 23 | iterator = IteratorExpression("user") 24 | query = AQLQuery().for_(iterator, CollectionExpression("users")).return_(iterator) 25 | 26 | # Preparing and printing the query 27 | prepared = query.prepare() 28 | print(prepared.query) 29 | ``` 30 | 31 | ### 2. Filtering Data with Conditions 32 | 33 | ```python 34 | from pydango.query import AQLQuery
from pydango.query.expressions import IteratorExpression 35 | 36 | # Fetching users aged 30 from the "users" collection 37 | user = IteratorExpression("user") 38 | query = AQLQuery().for_(user, "users").filter(user.age == 30).return_(user) 39 | 40 | # Preparing and printing the query 41 | prepared = query.prepare() 42 | print(prepared.query) 43 | ``` 44 | 45 | ### 3. Sorting and Limiting Results 46 | 47 | ```python 48 | from pydango.query.expressions import IteratorExpression 49 | from pydango.query.operations import AQLQuery 50 | 51 | # Fetching top 10 users sorted by their names 52 | user = IteratorExpression("user") 53 | 54 | query = AQLQuery().for_(user, "users").sort(+user.name).limit(10).return_(user) 55 | 56 | # Preparing and printing the query 57 | prepared = query.prepare() 58 | print(prepared.query) 59 | ``` 60 | 61 | ### 4.
Inserting Data 62 | 63 | ```python 64 | from pydango.query.expressions import NEW 65 | from pydango.query.operations import AQLQuery 66 | 67 | new_user = {"name": "John Doe", "age": 25, "email": "john@example.com"} 68 | 69 | # Inserting a new user into the "users" collection 70 | query = AQLQuery().insert(new_user, "users").return_(NEW()._id) 71 | 72 | # Preparing and printing the query 73 | prepared = query.prepare() 74 | print(prepared.query) 75 | ``` 76 | 77 | ### 5. Complex Query: Aggregation and Grouping 78 | 79 | ```python 80 | from pydango.query.expressions import VariableExpression, AssignmentExpression 81 | from pydango.query.operations import AQLQuery 82 | 83 | user = VariableExpression("users") 84 | category_collect = VariableExpression("categoryCollect") 85 | groups = VariableExpression("groups") 86 | 87 | # Grouping users by category 88 | query = ( 89 | AQLQuery() 90 | .for_(user, "users") 91 | .collect(collect=AssignmentExpression(category_collect, user.category), into=groups) 92 | .return_({"groups": groups, "categories": category_collect}) 93 | ) 94 | 95 | # Preparing and printing the query 96 | prepared = query.prepare() 97 | print(prepared.query) 98 | ``` 99 | -------------------------------------------------------------------------------- /tests/test_queries/test_ecommerce.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from aioarango.database import Database 3 | 4 | from pydango.connection.utils import deplete_cursor 5 | from tests.test_queries.data import DATA 6 | from tests.test_queries.ecommerce_queries import ( 7 | get_ordered_products_with_reviews_query, 8 | get_product_orders_reviews_query, 9 | get_product_reviews_query, 10 | get_user_orders_query, 11 | get_user_reviews_query, 12 | ) 13 | from tests.test_queries.utils import execute_query 14 | 15 | 16 | @pytest.mark.asyncio 17 | async def test_get_user_orders_query(database: Database): 18 | query, orders_coll = 
get_user_orders_query("1") 19 | results = await deplete_cursor(await execute_query(query, database)) 20 | expected = [ 21 | DATA["orders"][0], 22 | DATA["orders"][1], 23 | ] 24 | assert expected == results 25 | 26 | 27 | @pytest.mark.asyncio 28 | async def test_get_product_reviews_query(database: Database): 29 | query = get_product_reviews_query("1") 30 | results = await deplete_cursor(await execute_query(query, database)) 31 | expected = [ 32 | DATA["reviews"][1], 33 | DATA["reviews"][0], 34 | ] 35 | assert expected == results 36 | 37 | 38 | @pytest.mark.asyncio 39 | async def test_get_user_reviews_query(database: Database): 40 | query, reviews_coll = get_user_reviews_query("2") 41 | results = await deplete_cursor(await execute_query(query, database)) 42 | expected = [ 43 | DATA["reviews"][1], 44 | ] 45 | assert expected == results 46 | 47 | 48 | @pytest.mark.asyncio 49 | async def test_get_product_orders_reviews_query(database: Database): 50 | query = get_product_orders_reviews_query("1") 51 | results = await deplete_cursor(await execute_query(query, database)) 52 | expected = [ 53 | {"product": DATA["products"][0], "orders": DATA["orders"][0], "reviews": DATA["reviews"][1]}, 54 | {"product": DATA["products"][0], "orders": DATA["orders"][0], "reviews": DATA["reviews"][0]}, 55 | ] 56 | assert results == expected 57 | 58 | 59 | @pytest.mark.asyncio 60 | async def test_get_ordered_products_with_reviews_query(database: Database): 61 | query = get_ordered_products_with_reviews_query() 62 | results = await deplete_cursor(await execute_query(query, database)) 63 | expected = [ 64 | {"product": DATA["products"][0], "orders": DATA["orders"][0], "reviews": DATA["reviews"][0]}, 65 | {"product": DATA["products"][0], "orders": DATA["orders"][0], "reviews": DATA["reviews"][1]}, 66 | {"product": DATA["products"][1], "orders": DATA["orders"][0], "reviews": DATA["reviews"][2]}, 67 | # (DATA["products"][2], DATA["orders"][3], DATA["reviews"][3]), 68 | # (DATA["products"][3], 
DATA["orders"][4], DATA["reviews"][4]), 69 | # (DATA["products"][4], DATA["orders"][5], DATA["reviews"][5]), 70 | # (DATA["products"][5], DATA["orders"][6], DATA["reviews"][6]), 71 | # (DATA["products"][6], DATA["orders"][7], DATA["reviews"][7]), 72 | ] 73 | assert results == expected 74 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | ## **Introduction** 2 | 3 | `pydangorm` is a robust and user-friendly **asynchronous** ORM (Object-Relational Mapping) system tailored 4 | for [ArangoDB](https://arangodb.com/), a powerful 5 | multi-model NoSQL database. 6 | 7 | `pydangorm` is inspired by `SQLAlchemy`, a popular ORM system for SQL databases, It aims to provide a similar experience 8 | 9 | ## **Main Libraries** 10 | 11 | `pydangorm` is built upon the foundation of two primary libraries: 12 | 13 | - **pydantic**: An extremely popular data validation and settings management library for Python. In `pydangorm`, 14 | pydantic is utilized to provide robust model definitions and validation, ensuring that data interactions are clean, 15 | consistent, and error-free. 16 | 17 | - **aioarango**: An asynchronous driver for ArangoDB. It powers the core interactions with the ArangoDB database, making 18 | it possible for `pydangorm` to support asynchronous database operations, ensuring optimized I/O-bound tasks. 19 | 20 | ## **Features** 21 | 22 | - **Database Modeling**: Easily define, validate, and interact with your database models. This includes 23 | support for both vertex and edge models (`VertexModel` and `EdgeModel`). 24 | 25 | - **Pythonic Query Building**: Constructing queries for ArangoDB in a **SQLAlchemy** way. With a Pythonic API, you can 26 | effortlessly build complex queries to retrieve or manipulate your data. 27 | 28 | - **Session Management**: Manage your database sessions and connections with ease. 
Whether it's connecting to the 29 | database or handling transactions, `pydangorm` has got you covered. 30 | 31 | - **Collection Management**: From creating indices to truncating collections, manage all your collection operations 32 | without hassle. 33 | 34 | - **Asynchronous Support**: `pydangorm` is designed for the modern web. With full asynchronous support, your I/O-bound 35 | database tasks will be lightning fast, ensuring your applications remain responsive and scalable. 36 | 37 | - **Lazy Loading**: `pydangorm` supports lazy loading, ensuring that data is only fetched when needed, optimizing 38 | performance and reducing memory usage. 39 | 40 | ### **Roadmap** 41 | 42 | - [x] Support for **`AQL Query Building including traversal`** 43 | - [x] Support for **`VertexModel`** and **`EdgeModel`** 44 | - [x] Support for **`VertexModel`** relationships via **`EdgeModel`** 45 | - [x] Support for **`Model Saving and Updating (single instance)`** 46 | - [x] Support for **`Model Saving and Updating (with relations)`** 47 | - [x] Support for **`Model Deletion (single instance)`** 48 | - [x] Support for **`Model Fetching (single instance)`** 49 | - [x] Support for **`Model Fetching with relations and traversal`** 50 | - [x] Support for **`Model Graph CRUD Operations`** 51 | - [ ] Support for **`Model Deletion Cascade`** 52 | - [ ] Support for **`pydantic` `v2.0`** 53 | - [ ] Support for **`Model Back Population`** 54 | 55 | ______________________________________________________________________ 56 | 57 | ## **Contributions** 58 | 59 | We're actively looking for contributors to help improve `pydangorm` and expand its capabilities. 60 | 61 | Whether you're a seasoned 62 | developer or just starting out, your contributions are valuable to us. 63 | 64 | If you have ideas for new features, 65 | optimizations, or simply want to fix a bug, please check our contribution guidelines or reach out. Together, we can make 66 | `pydangorm` the best ArangoDB ORM for Python!
67 | -------------------------------------------------------------------------------- /tests/queries.py: -------------------------------------------------------------------------------- 1 | from pydango.query.expressions import ( 2 | CollectionExpression, 3 | IteratorExpression, 4 | ObjectExpression, 5 | ) 6 | from pydango.query.query import AQLQuery 7 | 8 | 9 | def simple_query(age): 10 | coll = CollectionExpression("users", "u") 11 | aql_query = AQLQuery().for_(coll).filter(coll.age > age).sort(+coll.age).return_(coll) 12 | return aql_query, coll 13 | 14 | 15 | def multiple_filters_query(age, gender): 16 | coll = CollectionExpression("users", "u") 17 | aql_query = ( 18 | AQLQuery().for_(coll).filter(coll.age > age).filter(coll.gender == gender).sort(+coll.name).return_(coll) 19 | ) 20 | return aql_query, coll 21 | 22 | 23 | def projection_complex_query(name, age): 24 | coll = CollectionExpression("users") 25 | i = IteratorExpression() 26 | aql = ( 27 | AQLQuery() 28 | .for_(i, in_=coll) 29 | .filter( 30 | (i.name == name) & (i.age == age), 31 | ) 32 | .return_(ObjectExpression({"a": i.name}, i)) 33 | ) 34 | return aql, coll, i 35 | 36 | 37 | def sort_filter_query(age): 38 | coll1 = CollectionExpression("users", "u1") 39 | coll2 = CollectionExpression("users", "u2") 40 | aql_query = ( 41 | AQLQuery() 42 | .for_(coll1) 43 | .filter(coll1.age > age) 44 | .for_(coll2) 45 | .filter(coll2.date > coll1.date) 46 | .sort(+coll1.name) 47 | .return_(coll1) 48 | ) 49 | return aql_query, coll1, coll2 50 | 51 | 52 | # def multiple_collections_query(age): 53 | # coll1 = CollectionExpression("users", "u1") 54 | # coll2 = CollectionExpression("orders", "o") 55 | # aql_query = ( 56 | # AQLQuery() 57 | # .for_(coll1) 58 | # .filter(coll1.age > age) 59 | # .for_(coll2) 60 | # .filter(coll2.date > coll1.date) 61 | # .sort(+coll1.name) 62 | # .return_(coll1) 63 | # ) 64 | # return aql_query, coll1, coll2 65 | 66 | 67 | def insert_query(coll, doc): 68 | obj = ObjectExpression(doc) 69 | 
coll = CollectionExpression(coll) 70 | aql_query = AQLQuery().insert(obj, coll) 71 | return aql_query, coll, obj 72 | 73 | 74 | def delete_query(coll, key: str): 75 | coll = CollectionExpression(coll) 76 | aql_query = AQLQuery().remove(key, coll) 77 | return aql_query, coll 78 | 79 | 80 | def insert_return_new_query(coll, doc, new): 81 | obj = ObjectExpression(doc) 82 | coll = CollectionExpression(coll) 83 | aql_query = AQLQuery().insert(obj, coll).return_(new) 84 | return aql_query, coll, obj 85 | 86 | 87 | def update_query(coll, key: str, doc): 88 | coll = CollectionExpression(coll) 89 | aql_query = AQLQuery().update(key, ObjectExpression(doc), coll) 90 | return aql_query, coll 91 | 92 | 93 | def replace_query(coll, key: str, doc): 94 | coll = CollectionExpression(coll) 95 | aql_query = AQLQuery().replace(key, ObjectExpression(doc), coll) 96 | return aql_query, coll 97 | 98 | 99 | def upsert_query(coll, filter_, insert=None, update=None, replace=None): 100 | coll = CollectionExpression(coll) 101 | aql_query = AQLQuery().upsert( 102 | ObjectExpression(filter_), 103 | insert=ObjectExpression(insert), 104 | replace=replace and ObjectExpression(replace), 105 | update=update and ObjectExpression(update), 106 | collection=coll, 107 | ) 108 | return aql_query, coll 109 | -------------------------------------------------------------------------------- /docs/orm/vertex2.md: -------------------------------------------------------------------------------- 1 | Certainly. Let's create the documentation for the `vertex.py` module in a developer-friendly format. 2 | 3 | ______________________________________________________________________ 4 | 5 | ## Vertex Module Documentation 6 | 7 | ### Introduction 8 | 9 | The `vertex.py` module is integral to the `pydango` ORM, providing foundational classes and utilities for representing 10 | and working with vertices in ArangoDB. 11 | 12 | ### Classes 13 | 14 | #### 1. 
VertexCollectionConfig 15 | 16 | - **Description**: Configuration specific to a vertex collection in ArangoDB. 17 | - **Attributes**: 18 | - `type`: Set to `CollectionType.NODE`, this attribute classifies the collection as a node or vertex collection in 19 | ArangoDB. 20 | 21 | #### 2. VertexMeta (Metaclass) 22 | 23 | - **Description**: A custom metaclass for vertex models. It processes namespace information, defines relationships 24 | between vertices, and sets up edge models during the class creation process. 25 | 26 | - **Methods**: 27 | 28 | - `_build_edge_to_field_mapping(relationships: Relationships) -> EdgeFieldMapping`: 29 | 30 | - **Purpose**: Constructs a mapping between edges and fields based on provided relationships. 31 | - **Parameters**: 32 | - `relationships`: Relationship information between vertices. 33 | 34 | - `_validate_edges(edge_to_field_mapping: EdgeFieldMapping, namespace: dict) -> None`: 35 | 36 | - **Purpose**: Validates the constructed edge-to-field mappings. 37 | - **Parameters**: 38 | - `edge_to_field_mapping`: Mapping between edges and fields. 39 | - `namespace`: Current namespace of the class being processed. 40 | 41 | - `_build_model(relationships: Relationships, name: str) -> Model`: 42 | 43 | - **Purpose**: Constructs a model based on provided relationships and name. 44 | - **Parameters**: 45 | - `relationships`: Relationship information between vertices. 46 | - `name`: Name for the constructed model. 47 | 48 | #### 3. VertexModel 49 | 50 | - **Description**: Represents a vertex model in the ORM. It defines and manages vertices and their relationships to 51 | edges. 52 | 53 | - **Attributes**: 54 | 55 | - `edges`: Represents the edges related to this vertex. 56 | - `__edge_to_field_mapping__`: A dictionary mapping edges to their respective fields. 57 | 58 | - **Methods**: 59 | 60 | - `__init__(self, **data: Any) -> None`: 61 | 62 | - **Purpose**: Initializes the vertex model. 
63 | - **Parameters**: 64 | - `data`: Data to initialize the vertex model with. 65 | 66 | - `dict(self, ...) -> dict`: 67 | 68 | - **Purpose**: Extracts the data from the model in a dictionary format. 69 | - **Parameters**: 70 | - Various parameters to customize the output, such as `include`, `exclude`, `by_alias`, etc. 71 | 72 | ### Tips for Developers: 73 | 74 | 1. When defining a vertex model, extend the `VertexModel` class. Use the provided utilities and methods to ensure proper 75 | relationships and data handling. 76 | 1. The `VertexMeta` metaclass processes and sets up relationships during class creation. Ensure that relationships are 77 | defined correctly to leverage the ORM's capabilities. 78 | 1. Utilize the `VertexModel`'s `dict` method for data extraction and serialization. 79 | 80 | ______________________________________________________________________ 81 | 82 | This documentation provides an overview and developer-centric guide to the `vertex.py` module. Adjustments can be made 83 | based on further content or specific requirements. 84 | -------------------------------------------------------------------------------- /docs/quickstart.md: -------------------------------------------------------------------------------- 1 | ## **Basic Setup** 2 | 3 | Before you can interact with your ArangoDB database, you'll need to set up a connection.
Here's a basic example: 4 | 5 | ```python title="Session Setup" 6 | from aioarango import ArangoClient 7 | 8 | from pydango import PydangoSession 9 | 10 | client = ArangoClient() # Add your connection parameters here 11 | session = PydangoSession( 12 | client=client, 13 | database="your_database_name" 14 | # Add your database parameters here 15 | ) 16 | ``` 17 | 18 | ## **Defining Models** 19 | 20 | With `pydangorm`, you can easily define vertex and edge models: 21 | 22 | ```python 23 | from typing import Annotated 24 | import datetime 25 | from pydango.indexes import PersistentIndex 26 | 27 | from pydango import ( 28 | VertexModel, 29 | EdgeModel, 30 | EdgeCollectionConfig, 31 | VertexCollectionConfig, 32 | Relation, 33 | ) 34 | 35 | 36 | class Visited(EdgeModel): 37 | rating: int 38 | on_date: datetime.date 39 | 40 | class Collection(EdgeCollectionConfig): 41 | name = "visited" 42 | indexes = [ 43 | PersistentIndex(fields=["rating"]), 44 | ] 45 | 46 | 47 | class LivesIn(EdgeModel): 48 | since: datetime.datetime 49 | 50 | class Collection(EdgeCollectionConfig): 51 | name = "lives_in" 52 | 53 | 54 | class City(VertexModel): 55 | name: str 56 | population: int 57 | 58 | class Collection(VertexCollectionConfig): 59 | name = "cities" 60 | indexes = [PersistentIndex(fields=["name"])] 61 | 62 | 63 | class Person(VertexModel): 64 | name: str 65 | age: int 66 | lives_in: Annotated[City, Relation[LivesIn]] 67 | visited: Annotated[list[City], Relation[Visited]] 68 | 69 | class Collection(VertexCollectionConfig): 70 | name = "people" 71 | indexes = [ 72 | PersistentIndex(fields=["name"]), 73 | PersistentIndex(fields=["age"]), 74 | ] 75 | ``` 76 | 77 | ## **CRUD Operations** 78 | 79 | Perform basic CRUD operations using the models: 80 | 81 | ```python 82 | # Create a new person 83 | async def async_application(): 84 | person = Person(name="Alice", age=30) 85 | person.lives_in = City(name="Buenos Aires", population=16_500_000) 86 | person.visited = [City(name="San Francisco", 
population=800_000)] 87 | person.edges.lives_in = LivesIn(since=datetime.datetime.now()) 88 | person.edges.visited = [Visited(rating=5, on_date=datetime.date.today())] 89 | 90 | await session.save(person) 91 | 92 | # Read a person by their ID 93 | retrieved_person = await session.get(Person, person.id) 94 | 95 | # Update the person's age 96 | person.age = 31 97 | await session.save(person) 98 | ``` 99 | 100 | ## **Running Queries** 101 | 102 | ### Simple Query 103 | 104 | Construct and execute a simple query to retrieve all people over the age of 25: 105 | 106 | ```python 107 | from pydango.orm import for_ 108 | 109 | query = for_(Person).filter(Person.age > 25).return_(Person) 110 | people_over_25 = await session.execute(query) 111 | ``` 112 | 113 | ### Traversal Query 114 | 115 | Construct and execute a simple query to cities visited by people who visited the same cities of a person: 116 | 117 | ```python 118 | from pydango.orm import traverse 119 | from pydango import TraversalDirection 120 | 121 | person_visited_cities = traverse( 122 | Person, 123 | edges=[Person.visited], 124 | start=person.id, 125 | depth=(1, 2), 126 | direction=TraversalDirection.INBOUND, 127 | ).return_(Person) 128 | ``` 129 | -------------------------------------------------------------------------------- /tests/test_queries/data.py: -------------------------------------------------------------------------------- 1 | DATA: dict[str, list] = { 2 | "users": [ 3 | {"_key": "1", "name": "Jane Smith", "age": 25, "gender": "Female"}, 4 | {"_key": "2", "name": "Emily Davis", "age": 28, "gender": "Female"}, 5 | {"_key": "3", "name": "John Doe", "age": 30, "gender": "Male"}, 6 | {"_key": "4", "name": "Michael Johnson", "age": 35, "gender": "Male"}, 7 | {"_key": "5", "name": "Emma Wilson", "age": 32, "gender": "Female"}, 8 | {"_key": "6", "name": "David Smith", "age": 27, "gender": "Male"}, 9 | {"_key": "7", "name": "Olivia Johnson", "age": 31, "gender": "Female"}, 10 | {"_key": "8", "name": "James 
Davis", "age": 29, "gender": "Male"}, 11 | ], 12 | "orders": [ 13 | { 14 | "_key": "1", 15 | "user": "1", 16 | "order_date": "2023-05-01", 17 | "total_amount": 100.0, 18 | "products": ["1", "2"], 19 | "status": "COMPLETED", 20 | }, 21 | { 22 | "_key": "2", 23 | "user": "1", 24 | "order_date": "2023-05-10", 25 | "total_amount": 50.0, 26 | "products": [], 27 | "status": "COMPLETED", 28 | }, 29 | { 30 | "_key": "3", 31 | "user": "2", 32 | "order_date": "2023-05-05", 33 | "total_amount": 200.0, 34 | "products": [], 35 | "status": "COMPLETED", 36 | }, 37 | { 38 | "_key": "4", 39 | "user": "3", 40 | "order_date": "2023-05-15", 41 | "total_amount": 150.0, 42 | "products": [], 43 | "status": "COMPLETED", 44 | }, 45 | { 46 | "_key": "5", 47 | "user": "4", 48 | "order_date": "2023-05-03", 49 | "total_amount": 80.0, 50 | "products": [], 51 | "status": "COMPLETED", 52 | }, 53 | { 54 | "_key": "6", 55 | "user": "5", 56 | "order_date": "2023-05-12", 57 | "total_amount": 120.0, 58 | "products": [], 59 | "status": "COMPLETED", 60 | }, 61 | { 62 | "_key": "7", 63 | "user": "6", 64 | "order_date": "2023-05-07", 65 | "total_amount": 70.0, 66 | "products": [], 67 | "status": "COMPLETED", 68 | }, 69 | { 70 | "_key": "8", 71 | "user": "7", 72 | "order_date": "2023-05-20", 73 | "total_amount": 90.0, 74 | "products": [], 75 | "status": "COMPLETED", 76 | }, 77 | ], 78 | "products": [ 79 | {"_key": "1", "name": "Product 1", "category": "Category A"}, 80 | {"_key": "2", "name": "Product 2", "category": "Category B"}, 81 | {"_key": "3", "name": "Product 3", "category": "Category A"}, 82 | {"_key": "4", "name": "Product 4", "category": "Category C"}, 83 | {"_key": "5", "name": "Product 5", "category": "Category B"}, 84 | {"_key": "6", "name": "Product 6", "category": "Category A"}, 85 | {"_key": "7", "name": "Product 7", "category": "Category C"}, 86 | ], 87 | "reviews": [ 88 | {"_key": "1", "product": "1", "user": "1", "rating": 4}, 89 | {"_key": "2", "product": "1", "user": "2", "rating": 
5}, 90 | {"_key": "3", "product": "2", "user": "3", "rating": 3}, 91 | {"_key": "4", "product": "3", "user": "4", "rating": 5}, 92 | {"_key": "5", "product": "4", "user": "5", "rating": 4}, 93 | {"_key": "6", "product": "5", "user": "6", "rating": 2}, 94 | {"_key": "7", "product": "6", "user": "7", "rating": 5}, 95 | {"_key": "8", "product": "7", "user": "8", "rating": 4}, 96 | ], 97 | } 98 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | build-backend = "poetry.core.masonry.api" 3 | requires = ["poetry-core"] 4 | 5 | [tool.autopep8] 6 | aggressive = 3 7 | # ignore = "E501,W6" # or ["E501", "W6"] 8 | in-place = true 9 | max_line_length = 120 10 | recursive = true 11 | 12 | [tool.bandit.assert_used] 13 | skips = ["tests/utils*.py", '**/test_*.py', '**/test_*.py'] 14 | 15 | [tool.black] 16 | exclude = """ 17 | ( 18 | /( 19 | .eggs # exclude a few common directories in the 20 | | .git # root of the project 21 | | .hg 22 | | .mypy_cache 23 | | .tox 24 | | .venv 25 | | .venv2 26 | | venv 27 | | _build 28 | | buck-out 29 | | build 30 | | dist 31 | )/ 32 | ) 33 | """ 34 | include = '\.pyi?$' 35 | line-length = 120 36 | preview = true 37 | target-version = ['py39'] 38 | 39 | [tool.coverage.report] 40 | exclude_lines = [ 41 | "if TYPE_CHECKING:", 42 | "if __name__ == .__main__.:" 43 | ] 44 | 45 | [tool.isort] 46 | profile = "black" 47 | 48 | [tool.mypy] 49 | # explicit_package_bases = true 50 | exclude = """ 51 | ( 52 | /( 53 | backups # exclude a few common directories in the 54 | | local # root of the project 55 | )/ 56 | ) 57 | """ 58 | mypy_path = "./stubs/" 59 | plugins = ["pydantic.mypy"] 60 | warn_redundant_casts = true 61 | 62 | [[tool.mypy.overrides]] 63 | ignore_missing_imports = true 64 | module = [ 65 | "requests_toolbelt", 66 | "indexed" 67 | ] 68 | 69 | [[tool.mypy.overrides]] 70 | disable_error_code = 
["attr-defined"] 71 | module = [ 72 | "pydango.connection.client" 73 | ] 74 | 75 | [tool.poetry] 76 | authors = ["nadobando <7695172+nadobando@users.noreply.github.com>"] 77 | classifiers = [ 78 | "License :: OSI Approved :: MIT License", 79 | "Operating System :: OS Independent", 80 | "Programming Language :: Python", 81 | "Programming Language :: Python :: 3.8", 82 | "Programming Language :: Python :: 3.9", 83 | "Programming Language :: Python :: 3.10", 84 | "Programming Language :: Python :: 3.11", 85 | "Programming Language :: Python :: 3.12", 86 | "Programming Language :: Python :: 3.13", 87 | "Topic :: Database", 88 | "Framework :: Pydantic :: 1" 89 | ] 90 | description = "pydantic based ArangoDB ODM" 91 | documentation = "https://nadobando.github.io/pydangorm" 92 | homepage = "https://github.com/nadobando/pydangorm" 93 | license = "MIT" 94 | name = "pydangorm" 95 | packages = [{include = "pydango"}] 96 | readme = "README.md" 97 | repository = "https://github.com/nadobando/pydangorm" 98 | version = "0.3.0" 99 | 100 | [tool.poetry.dependencies] 101 | aioarango = "^1.0.0" 102 | indexed = "^1.3.0" 103 | pydantic = ">=1.10.17" 104 | python = ">=3.9,<4.0" 105 | urllib3 = "==1.26.15" 106 | 107 | [tool.poetry.group.dev.dependencies] 108 | black = "^23.3.0" 109 | coverage = "^7.2.5" 110 | freezegun = "^1.2.2" 111 | isort = "^5.12.0" 112 | mypy = "^1.5.1" 113 | pre-commit = "^3.3.1" 114 | pydiction = "^0" 115 | pytest = "^7.3.1" 116 | pytest-asyncio = "^0.21.0" 117 | pytest-cov = "^4.1.0" 118 | 119 | [tool.poetry.group.docs.dependencies] 120 | mkdocs-material = "^9.4.2" 121 | pymdown-extensions = "^10.3" 122 | 123 | [tool.pytest.ini_options] 124 | addopts = "-ra" 125 | asyncio_mode = "auto" 126 | 127 | [tool.ruff] 128 | exclude = ["stubs"] 129 | line-length = 120 130 | 131 | [tool.semantic_release] 132 | match = "main" 133 | prerelease = false 134 | version_toml = [ 135 | "pyproject.toml:tool.poetry.version" 136 | ] 137 | version_variables = [ 138 | 
"pydango/__init__.py:__version__" 139 | ] 140 | 141 | [tool.semantic_release.changelog] 142 | exclude_commit_patterns = [ 143 | "^ci:.*", 144 | "^(?!feat:|fix:|perf:|refactor:).*$" 145 | ] 146 | 147 | [tool.semantic_release.publish] 148 | upload_to_vcs_release = false 149 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ### Python template 2 | # Byte-compiled / optimized / DLL files 3 | __pycache__/ 4 | *.py[cod] 5 | *$py.class 6 | 7 | # C extensions 8 | *.so 9 | 10 | # Distribution / packaging 11 | .Python 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | cover/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | db.sqlite3-journal 64 | 65 | # Flask stuff: 66 | instance/ 67 | .webassets-cache 68 | 69 | # Scrapy stuff: 70 | .scrapy 71 | 72 | # Sphinx documentation 73 | docs/_build/ 74 | 75 | # PyBuilder 76 | .pybuilder/ 77 | target/ 78 | 79 | # Jupyter Notebook 80 | .ipynb_checkpoints 81 | 82 | # IPython 83 | profile_default/ 84 | ipython_config.py 85 | 86 | # pyenv 87 | # For a library or package, you might want to ignore these files since the code is 88 | # intended to run in multiple environments; otherwise, check them in: 89 | # .python-version 90 | 91 | # pipenv 92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 95 | # install all needed dependencies. 96 | #Pipfile.lock 97 | 98 | # poetry 99 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 100 | # This is especially recommended for binary packages to ensure reproducibility, and is more 101 | # commonly ignored for libraries. 102 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 103 | #poetry.lock 104 | 105 | # pdm 106 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
107 | #pdm.lock 108 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 109 | # in version control. 110 | # https://pdm.fming.dev/#use-with-ide 111 | .pdm.toml 112 | 113 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 114 | __pypackages__/ 115 | 116 | # Celery stuff 117 | celerybeat-schedule 118 | celerybeat.pid 119 | 120 | # SageMath parsed files 121 | *.sage.py 122 | 123 | # Environments 124 | .env 125 | .venv 126 | env/ 127 | venv/ 128 | ENV/ 129 | env.bak/ 130 | venv.bak/ 131 | 132 | # Spyder project settings 133 | .spyderproject 134 | .spyproject 135 | 136 | # Rope project settings 137 | .ropeproject 138 | 139 | # mkdocs documentation 140 | /site 141 | 142 | # mypy 143 | .mypy_cache/ 144 | .dmypy.json 145 | dmypy.json 146 | 147 | # Pyre type checker 148 | .pyre/ 149 | 150 | # pytype static type analyzer 151 | .pytype/ 152 | 153 | # Cython debug symbols 154 | cython_debug/ 155 | *.pyc 156 | # PyCharm 157 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 158 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 159 | # and can be added to the global gitignore or merged into this file. For a more nuclear 160 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 161 | #.idea/ 162 | __pycache__ 163 | .idea 164 | local 165 | stubs/* 166 | .ruff_cache 167 | -------------------------------------------------------------------------------- /docs/orm/collection.md: -------------------------------------------------------------------------------- 1 | ## **`CollectionConfig`** 2 | 3 | The `CollectionConfig` class provides configuration specific to an ArangoDB collection. It defines attributes that 4 | represent various settings and configurations for a collection in ArangoDB, allowing developers to fine-tune collection 5 | behavior. 
6 | 7 | ### **Attributes** 8 | 9 | - **`name`**: The name of the ArangoDB collection. 10 | - **`type`**: The type of the collection, represented as an enum (`CollectionType`). 11 | - **`sync`**: A boolean indicating whether to synchronize the collection. Default is `False`. 12 | - **`system`**: A boolean indicating if the collection is a system collection. Default is `False`. 13 | - **`key_generator`**: Specifies the type of key generation strategy. Possible values are "traditional" and " 14 | - **`autoincrement`**". Default is "traditional". 15 | - **`user_keys`**: A boolean indicating whether user-generated keys are allowed. Default is `False`. 16 | - **`key_increment`**: An integer specifying the increment value for auto-incrementing keys. 17 | - **`key_offset`**: An integer specifying the offset value for auto-incrementing keys. 18 | - **`shard_fields`**: A list of fields that determine the sharding strategy. 19 | - **`shard_count`**: An integer indicating the number of shards for the collection. 20 | - **`replication_factor`**: An integer specifying the replication factor for the collection. 21 | - **`shard_like`**: A string representing another collection to use as a sharding reference. Available in enterprise 22 | - editions only. 23 | - **`sync_replication`**: A boolean indicating whether to synchronize replication. 24 | - **`enforce_replication_factor`**: A boolean indicating whether to enforce the specified replication factor. 25 | - **`sharding_strategy`**: Specifies the sharding strategy. Possible values include "community-compat", " 26 | enterprise-smart-edge-compat", and "enterprise-smart-edge". 27 | - **`smart_join_attribute`**: A string specifying the attribute used for smart joins. Available in enterprise editions 28 | only. 29 | - **`write_concern`**: An integer indicating the level of write concern for the collection. 30 | - **`sync_schema`**: A boolean indicating whether to synchronize the schema. Default is `False`. 
31 | - **`indexes`**: A sequence of index configurations [**`Indexes`**](#indexes) for the collection. Default is an empty list. 32 | 33 | ### **Tips for Developers** 34 | 35 | ## **Indexes** 36 | 37 | ### Overview 38 | 39 | The indexes module offers a suite of classes to define and work with various types of indexes in ArangoDB collections, 40 | optimizing query performance. 41 | 42 | ### Indexes 43 | 44 | - **`GeoIndex`**: Define geospatial indexes for querying based on geographical locations. 45 | - **`HashIndex`**: Craft hash indexes for rapid equality-based lookups. 46 | - **`SkipListIndex`**: Ideal for range queries, providing a range-based indexing mechanism. 47 | - **`FullTextIndex`**: Optimize your text-based queries with this full-text search index. 48 | - **`PersistentIndex`**: Ensures the index remains stored on disk for persistence. 49 | - **`TTLIndex`**: Automatically remove documents post a specified time with this Time-To-Live index. 50 | 51 | !!! tip 52 | Tips for Developers 53 | 54 | 1. When setting up a collection in ArangoDB through the ORM, utilize the `CollectionConfig` class to customize 55 | collection behavior. 56 | 1. Ensure that the `name` attribute is set, as it determines the name of the collection in ArangoDB. 57 | 1. If using the enterprise edition of ArangoDB, consider leveraging the enterprise-specific attributes like `shard_like` 58 | and `smart_join_attribute` for advanced configurations. 59 | 1. Adjust the `indexes` attribute to define specific indexes on the collection for optimized queries. 60 | 1. Determine the nature of your queries to select the appropriate index type. For instance, use GeoIndex for location-based 61 | queries and FullTextIndex for textual searches. 62 | 1. Always specify the fields attribute when defining an index, as it determines which fields in the collection the index 63 | applies to. 64 | 1. Consider using the `in_background` attribute if you want to create the index without blocking other operations. 
65 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | fail_fast: false 3 | default_stages: 4 | - commit 5 | repos: 6 | - repo: https://github.com/asottile/pyupgrade 7 | rev: v3.13.0 8 | hooks: 9 | - id: pyupgrade 10 | args: 11 | - --keep-runtime-typing 12 | 13 | - repo: https://github.com/pre-commit/mirrors-isort 14 | rev: v5.10.1 15 | hooks: 16 | - id: isort 17 | args: 18 | - --profile=black 19 | 20 | - repo: https://github.com/psf/black-pre-commit-mirror 21 | rev: 23.9.1 22 | hooks: 23 | - id: black 24 | args: 25 | - --config=pyproject.toml 26 | 27 | - repo: https://github.com/myint/autoflake 28 | rev: v2.2.1 29 | hooks: 30 | - id: autoflake 31 | exclude: .*/__init__.py 32 | args: 33 | - --in-place 34 | - --remove-all-unused-imports 35 | - --expand-star-imports 36 | - --remove-duplicate-keys 37 | - --remove-unused-variables 38 | 39 | - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks 40 | rev: v2.10.0 41 | hooks: 42 | - id: pretty-format-toml 43 | args: [--autofix] 44 | files: toml 45 | 46 | - repo: https://github.com/pre-commit/pygrep-hooks 47 | rev: v1.10.0 48 | hooks: 49 | - id: python-use-type-annotations 50 | - id: python-check-blanket-noqa 51 | 52 | - repo: https://github.com/charliermarsh/ruff-pre-commit 53 | rev: v0.0.291 54 | hooks: 55 | - id: ruff 56 | args: 57 | - --config 58 | - ./pyproject.toml 59 | - --fix 60 | 61 | - repo: https://github.com/PyCQA/bandit 62 | rev: 1.7.5 63 | hooks: 64 | - id: bandit 65 | types: 66 | - python 67 | args: 68 | - -c 69 | - pyproject.toml 70 | - --quiet 71 | additional_dependencies: 72 | - bandit[toml] 73 | - toml 74 | 75 | - repo: https://github.com/pre-commit/mirrors-mypy 76 | rev: v1.5.1 77 | hooks: 78 | - id: mypy 79 | additional_dependencies: 80 | - mypy-extensions 81 | - pydantic==1.10.12 82 | - pytest~=7.3.1 83 | - httpx~=0.18.2 84 | - 
pydiction~=0.1.0 85 | - pytest-asyncio~=0.21.0 86 | 87 | 88 | 89 | 90 | - repo: https://github.com/pre-commit/pre-commit-hooks 91 | rev: v4.4.0 92 | hooks: 93 | - id: check-ast 94 | - id: check-merge-conflict 95 | - id: trailing-whitespace 96 | - id: end-of-file-fixer 97 | - id: check-symlinks 98 | - id: check-toml 99 | - id: debug-statements 100 | - id: no-commit-to-branch 101 | args: 102 | - --branch 103 | - main 104 | 105 | 106 | - repo: local 107 | hooks: 108 | - id: poetry-lock 109 | name: poetry-install 110 | description: run poetry install to install dependencies from the lock file 111 | entry: poetry lock 112 | args: 113 | - --no-update 114 | language: python 115 | pass_filenames: false 116 | files: pyproject.toml 117 | 118 | - id: poetry-install 119 | name: poetry-install 120 | description: run poetry install to install dependencies from the lock file 121 | entry: poetry install 122 | args: 123 | - --no-root 124 | language: python 125 | pass_filenames: false 126 | stages: [post-checkout, post-merge] 127 | always_run: true 128 | 129 | 130 | - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt 131 | rev: 0.2.3 132 | hooks: 133 | - id: yamlfmt 134 | args: 135 | - --offset 136 | - '2' 137 | - --mapping 138 | - '2' 139 | - --sequence 140 | - '4' 141 | 142 | 143 | - repo: https://github.com/executablebooks/mdformat 144 | rev: 0.7.17 # Use the ref you want to point at 145 | hooks: 146 | - id: mdformat 147 | # Optionally add plugins 148 | additional_dependencies: 149 | - mdformat-gfm 150 | - mdformat-black 151 | -------------------------------------------------------------------------------- /docs/query/query.md: -------------------------------------------------------------------------------- 1 | # **Query** 2 | 3 | The pydango/query package provides a comprehensive and Pythonic interface for constructing and executing queries on 4 | ArangoDB. 
It abstracts the complexities of direct AQL (ArangoDB Query Language) and offers a structured approach to 5 | build both simple and complex queries with ease. 6 | 7 | ### **AQLQuery** 8 | 9 | #### **Introduction** 10 | 11 | The `AQLQuery` class offers a flexible and Pythonic interface for constructing, managing, and preparing AQL queries. It provides methods corresponding to a variety of AQL operations, allowing users to create complex queries by chaining these operations together. 12 | 13 | #### Class Attributes: 14 | 15 | - `_ops`: A list of operations associated with the query. 16 | - `sep`: Specifies the separator between different parts of the query. 17 | - `bind_vars`, `compiled_vars`, `__dynamic_vars__`, `__used_vars__`: Various attributes related to variables and their management within the query. 18 | - `_parameters`: Holds query parameters. 19 | - `_var_counter`, `_param_counter`: Counters for generating unique variable and parameter names. 20 | - `parent`: Reference to a parent `AQLQuery`, if any. 21 | - `__is_modification_query__`: Boolean indicating if the query modifies data. 22 | 23 | #### Methods: 24 | 25 | ##### **`for_`** 26 | 27 | `for_(self, collection_or_variable, in_: Expression) -> 'AQLQuery'` 28 | Adds a [**`FOR`**](./operations.md#foroperation) operation to the query. Iterates over a collection or variable. 29 | 30 | ##### **`filter`** 31 | 32 | `filter(self, filter_expr: Expression) -> 'AQLQuery'` 33 | Adds a [**`FILTER`**](./operations.md#filteroperation) operation to the query. Filters the results of a query based on a condition. 34 | 35 | ##### **`sort`** 36 | 37 | `sort(self, *args: Expression) -> 'AQLQuery'` 38 | Adds a [**`SORT`**](./operations.md#sortoperation) operation to the query. Sorts the results of a query based on provided parameters. 
39 | 40 | ##### **`let`** 41 | 42 | `let(self, variable: Union[str, VariableExpression], value: Expression) -> Union[VariableExpression, 'AQLQuery']` 43 | Adds a [**`LET`**](./operations.md#letoperation) operation to the query. Defines a variable within the query. 44 | 45 | ##### **`return_`** 46 | 47 | `return_(self, return_expr: Expression) -> 'AQLQuery'` 48 | Adds a [**`RETURN`**](./operations.md#returnoperation) operation to the query. Specifies the return value of the query. 49 | 50 | ##### **`limit`** 51 | 52 | `limit(self, limit: int, offset: Optional[int] = None) -> 'AQLQuery'` 53 | Adds a [**`LIMIT`**](./operations.md#limitoperation) operation to the query. Limits the number of results returned by the query. 54 | 55 | ##### **`insert`** 56 | 57 | `insert(self, doc: Dict[str, Any], collection: str) -> 'AQLQuery'` 58 | Adds an [**`INSERT`**](./operations.md#insertoperation) operation to the query. Inserts a document into a collection. 59 | 60 | ##### **`remove`** 61 | 62 | `remove(...) -> 'AQLQuery'` 63 | Adds a [**`REMOVE`**](./operations.md#removeoperation) operation to the query. Removes documents from a collection. 64 | 65 | ##### **`update`** 66 | 67 | `update(...) -> 'AQLQuery'` 68 | Adds an [**`UPDATE`**](./operations.md#updateoperation) operation to the query. Updates documents in a collection. 69 | 70 | ##### **`replace`** 71 | 72 | `replace(...) -> 'AQLQuery'` 73 | Adds a [**`REPLACE`**](./operations.md#replaceoperation) operation to the query. Replaces documents in a collection. 74 | 75 | ##### **`upsert`** 76 | 77 | `upsert(...) -> 'AQLQuery'` 78 | Adds an [**`UPSERT`**](./operations.md#upsertoperation) operation to the query. Inserts or updates documents in a collection. 79 | 80 | ##### **`collect`** 81 | 82 | `collect(...) -> 'AQLQuery'` 83 | Adds a [**`COLLECT`**](./operations.md#collectoperation) operation to the query. Collects documents from a collection. 84 | 85 | ##### **`traverse`** 86 | 87 | `traverse(...) 
-> 'AQLQuery'` 88 | Creates a [**`TRAVERSE`**](./operations.md#traverseoperation) operation. Traverses a graph. 89 | 90 | ##### **`prepare`** 91 | 92 | `prepare() -> PreparedQuery` 93 | Prepares the query for execution, returning a `PreparedQuery` instance. 94 | 95 | ### **PreparedQuery** 96 | 97 | #### Introduction 98 | 99 | The PreparedQuery class represents a prepared AQL query ready for execution against an ArangoDB instance. It encapsulates the AQL query string and any bind variables that need to be provided alongside the query. 100 | 101 | #### Class Attributes: 102 | 103 | - **`query`**: A string that holds the AQL query. 104 | - **`bind_vars`**: A dictionary of variables to be bound to the query. These are represented in a JSON-compatible format. 105 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Pydango - Asynchronous Pydantic ArangoDB ORM 2 | 3 | `pydangorm` is a Python ORM (Object-Relational Mapping) system tailored for [ArangoDB](https://www.arangodb.com/), a multi-model NoSQL database. It provides a Pythonic interface for defining models, constructing queries, and interacting with ArangoDB, abstracting away the direct complexities of database interactions. 4 | 5 | ## Features 6 | 7 | - **Model Definitions with pydantic(v1)**: Easily define and validate your database models using `pydantic`. 8 | 9 | - VertexModel 10 | - EdgeModel 11 | 12 | - **Pythonic Query Building**: Construct complex ArangoDB queries with a Pythonic API. 13 | 14 | - **Session Management**: Streamlined management of database sessions and connections. 15 | 16 | - **Collection Management**: Create indices, truncate collections, and perform other collection operations. 17 | 18 | - **Asynchronous Support**: Perform asynchronous database operations for optimized I/O-bound tasks. 
19 | 20 | ______________________________________________________________________ 21 | 22 | ## [Full Documentation](https://nadobando.github.io/pydangorm) 23 | 24 | ## Installation 25 | 26 | ```shell 27 | pip install pydangorm 28 | ``` 29 | 30 | ## Quick Start & Usage Examples 31 | 32 | ### Defining Models 33 | 34 | Using `pydangorm`, you can define vertex and edge models with ease: 35 | 36 | ```python 37 | import datetime 38 | from typing import Annotated 39 | 40 | from pydango import ( 41 | EdgeModel, 42 | VertexModel, 43 | EdgeCollectionConfig, 44 | VertexCollectionConfig, 45 | Relation, 46 | ) 47 | from pydango.indexes import PersistentIndex 48 | 49 | 50 | class Visited(EdgeModel): 51 | rating: int 52 | on_date: datetime.date 53 | 54 | class Collection(EdgeCollectionConfig): 55 | name = "visited" 56 | indexes = [ 57 | PersistentIndex(fields=["rating"]), 58 | ] 59 | 60 | 61 | class LivesIn(EdgeModel): 62 | since: datetime.datetime 63 | 64 | class Collection(EdgeCollectionConfig): 65 | name = "lives_in" 66 | 67 | 68 | class Person(VertexModel): 69 | name: str 70 | age: int 71 | lives_in: Annotated["City", Relation[LivesIn]] 72 | visited: Annotated[list["City"], Relation[Visited]] 73 | 74 | class Collection(VertexCollectionConfig): 75 | name = "people" 76 | indexes = [ 77 | PersistentIndex(fields=["name"]), 78 | PersistentIndex(fields=["age"]), 79 | ] 80 | 81 | 82 | class City(VertexModel): 83 | name: str 84 | population: int 85 | 86 | class Collection(VertexCollectionConfig): 87 | name = "cities" 88 | indexes = [ 89 | PersistentIndex(fields=["name"]), 90 | PersistentIndex(fields=["population"]), 91 | ] 92 | ``` 93 | 94 | ### Querying Data 95 | 96 | Construct and execute queries in a Pythonic manner: 97 | 98 | ```python 99 | from aioarango import ArangoClient 100 | from app.models import Person, City, Visited, LivesIn 101 | 102 | from pydango import PydangoSession 103 | from pydango.orm import for_ 104 | from pydango.connection.utils import get_or_create_db, 
deplete_cursor 105 | 106 | person = Person( 107 | name="John", 108 | age=35, 109 | lives_in=City(name="Buenos Aires", population=30000000), 110 | visited=[ 111 | City(name="Amsterdam", population=123), 112 | City(name="New Delhi", population=123), 113 | ], 114 | edges={ 115 | Person.lives_in: LivesIn(since=datetime.datetime.now()), 116 | Person.visited: [ 117 | Visited(rating=10, on_date=datetime.date.today()), 118 | Visited(rating=10, on_date=datetime.date.today()), 119 | ], 120 | }, 121 | ) 122 | 123 | 124 | async def main(): 125 | db = await get_or_create_db(ArangoClient(), "app") 126 | session = PydangoSession(database=db) 127 | # Retrieving users older than 10 years 128 | await session.save(person) 129 | assert person.id.startswith("people/") 130 | 131 | db_person = await session.get(Person, person.key, fetch_edges=True, depth=(1, 1)) 132 | assert db_person == person 133 | 134 | query = for_(Person).filter(Person.age > 10).sort(-Person.age).return_(Person) 135 | query_result = await session.execute(query) 136 | result = await deplete_cursor(query_result) 137 | ``` 138 | 139 | More detailed examples and scenarios can be found in the `tests` directory, which showcases modeling and querying for different use-cases like cities, families, and e-commerce operations. 140 | 141 | ## Detailed Documentation 142 | 143 | For detailed documentation, please refer to the [documentation](https://nadobando.github.io/pydangorm). 144 | 145 | ## Contributing 146 | 147 | Contributions to `pydangorm` are welcome! Please refer to the `CONTRIBUTING.md` file for guidelines. 148 | 149 | ## License 150 | 151 | `pydangorm` is licensed under [MIT](./LICENSE). See the `LICENSE` file for details. 
import json
from abc import ABC
from dataclasses import dataclass
from enum import Enum
from typing import Optional, Union

from pydango.query.utils import Compilable


class Options(Compilable):
    """Base class for AQL operation options.

    Subclasses fill ``_map`` (AQL option name -> python value) in their
    ``__post_init__``; :meth:`compile` renders every non-``None`` entry as an
    AQL options object literal, e.g. ``{waitForSync: true}``.
    """

    _map: dict = {}

    def compile(self):
        """Render the options as an AQL object literal, or ``None`` when no
        option was explicitly set."""
        pairs = [f"{field}: {json.dumps(value)}" for field, value in self._map.items() if value is not None]
        if pairs:
            return f"{{{', '.join(pairs)}}}"
        return None


@dataclass
class LoopOptions(Options):
    """Options accepted by the AQL ``FOR`` operation."""

    index_hint: Optional[Union[list[str], str]] = None
    force_index_hint: Optional[bool] = None
    disable_index: Optional[bool] = None
    max_projections: Optional[int] = None
    use_cache: Optional[bool] = None
    look_ahead: Optional[int] = None

    def __post_init__(self):
        self._map = {
            # BUG FIX: ArangoDB expects lowerCamelCase "indexHint"; the
            # previous "IndexHint" (capital I) would be silently ignored.
            "indexHint": self.index_hint,
            "forceIndexHint": self.force_index_hint,
            "disableIndex": self.disable_index,
            "maxProjections": self.max_projections,
            "useCache": self.use_cache,
            "lookAhead": self.look_ahead,
        }


@dataclass
class BaseModificationOptions(Options, ABC):
    """Options shared by the data-modification operations
    (REMOVE / UPDATE / REPLACE / UPSERT)."""

    ignore_errors: Optional[bool] = None
    wait_for_sync: Optional[bool] = None
    ignore_revs: Optional[bool] = None
    exclusive: Optional[bool] = None
    refill_index_caches: Optional[bool] = None

    def __post_init__(self):
        self._map = {
            "ignoreErrors": self.ignore_errors,
            "waitForSync": self.wait_for_sync,
            "ignoreRevs": self.ignore_revs,
            "exclusive": self.exclusive,
            "refillIndexCaches": self.refill_index_caches,
        }


@dataclass()
class RemoveOptions(BaseModificationOptions):
    """Options for the AQL ``REMOVE`` operation.

    All fields and the option mapping are inherited from
    ``BaseModificationOptions``; the previous verbatim redeclaration of every
    field and of ``__post_init__`` was redundant (dataclass field positions
    are preserved from the base, so the constructor signature is unchanged).
    """


@dataclass()
class UpdateOptions(BaseModificationOptions):
    """Options for the AQL ``UPDATE`` operation."""

    keep_null: Optional[bool] = None
    merge_objects: Optional[bool] = None

    def __post_init__(self):
        self._map = {
            "ignoreErrors": self.ignore_errors,
            "keepNull": self.keep_null,
            "mergeObjects": self.merge_objects,
            "waitForSync": self.wait_for_sync,
            "ignoreRevs": self.ignore_revs,
            "exclusive": self.exclusive,
            "refillIndexCaches": self.refill_index_caches,
        }


@dataclass()
class ReplaceOptions(BaseModificationOptions):
    """Options for the AQL ``REPLACE`` operation (same option set as
    ``BaseModificationOptions``)."""


@dataclass()
class UpsertOptions(BaseModificationOptions):
    """Options for the AQL ``UPSERT`` operation."""

    keep_null: Optional[bool] = None
    merge_objects: Optional[bool] = None
    index_hint: Optional[Union[list[str], str]] = None
    force_index_hint: Optional[bool] = None

    def __post_init__(self):
        # NOTE(review): refill_index_caches is inherited but was not emitted
        # by the original mapping; kept as-is — confirm against the ArangoDB
        # UPSERT options documentation before adding it.
        self._map = {
            "ignoreErrors": self.ignore_errors,
            "keepNull": self.keep_null,
            "mergeObjects": self.merge_objects,
            "waitForSync": self.wait_for_sync,
            "ignoreRevs": self.ignore_revs,
            "exclusive": self.exclusive,
            "indexHint": self.index_hint,
            "forceIndexHint": self.force_index_hint,
        }


class CollectMethod(str, Enum):
    """Grouping strategy for the AQL ``COLLECT`` operation."""

    SORTED = "sorted"
    HASH = "hash"


@dataclass()
class CollectOptions(Options):
    """Options for the AQL ``COLLECT`` operation."""

    method: CollectMethod

    def __post_init__(self):
        # BUG FIX: previously ``_map`` was never populated here, so
        # ``compile()`` returned None and the requested collect method was
        # silently dropped from the generated query.
        self._map = {"method": self.method.value}
54 | 55 | - name: cache poetry install 56 | uses: actions/cache@v3 57 | with: 58 | path: ~/.local 59 | key: poetry-1.4.0 60 | 61 | - name: Install and configure Poetry 62 | uses: snok/install-poetry@v1 63 | with: 64 | version: 1.4.0 65 | virtualenvs-create: true 66 | virtualenvs-in-project: false 67 | installer-parallel: true 68 | 69 | 70 | - name: cache deps 71 | id: cache-deps 72 | uses: actions/cache@v3 73 | with: 74 | path: .venv 75 | key: pydeps-${{ hashFiles('**/poetry.lock') }} 76 | 77 | # Install dependencies. `--no-root` means "install all dependencies but not the project 78 | # itself", which is what you want to avoid caching _your_ code. The `if` statement 79 | # ensures this only runs on a cache miss. 80 | - run: poetry install --no-interaction --no-root --with docs 81 | if: steps.cache-deps.outputs.cache-hit != 'true' 82 | 83 | - run: poetry install --no-interaction --with docs 84 | 85 | - name: test 86 | run: | 87 | poetry run pytest --cov=pydango --cov-report=xml:coverage.xml --junitxml=test-results/test-results.xml tests 88 | 89 | 90 | - name: Test Report 91 | uses: mikepenz/action-junit-report@v4 92 | if: success() || failure() 93 | with: 94 | report_paths: '**/test-results/*.xml' 95 | 96 | 97 | 98 | - name: Coverage Report 99 | uses: 5monkeys/cobertura-action@master 100 | if: success() || failure() 101 | with: 102 | path: coverage.xml 103 | minimum_coverage: 75 104 | fail_below_threshold: true 105 | 106 | 107 | 108 | release: 109 | name: Release 110 | if: github.ref == 'refs/heads/main' 111 | needs: 112 | - pre-commit 113 | - test 114 | runs-on: ubuntu-latest 115 | concurrency: release 116 | permissions: 117 | id-token: write 118 | contents: write 119 | steps: 120 | - uses: actions/checkout@v4 121 | with: 122 | fetch-depth: 0 123 | 124 | - name: cache deps 125 | id: cache-deps 126 | uses: actions/cache@v3 127 | with: 128 | path: /semantic-release 129 | key: semantic-release 130 | 131 | 132 | - name: cache poetry install 133 | uses: 
actions/cache@v3 134 | with: 135 | path: ~/.local 136 | key: poetry-1.4.0 137 | 138 | - name: Install and configure Poetry 139 | uses: snok/install-poetry@v1 140 | with: 141 | version: 1.4.0 142 | virtualenvs-create: true 143 | virtualenvs-in-project: false 144 | installer-parallel: true 145 | 146 | - name: Python Semantic Release 147 | id: semver 148 | uses: python-semantic-release/python-semantic-release@v8.0.8 149 | with: 150 | github_token: ${{ secrets.GITHUB_TOKEN }} 151 | 152 | - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV 153 | - uses: actions/cache@v3 154 | with: 155 | key: mkdocs-material-${{ env.cache_id }} 156 | path: .cache 157 | restore-keys: | 158 | mkdocs-material- 159 | 160 | - run: poetry install --no-interaction --only docs 161 | 162 | - run: poetry run mkdocs gh-deploy --force 163 | 164 | - run: | 165 | poetry build 166 | 167 | - name: Store the distribution packages 168 | uses: actions/upload-artifact@v3 169 | if: steps.semver.outputs.released == 'true' 170 | 171 | with: 172 | name: python-package-distributions 173 | path: dist/ 174 | 175 | outputs: 176 | released: ${{ steps.semver.outputs.released }} 177 | 178 | publish: 179 | name: Publish 180 | needs: 181 | - pre-commit 182 | - release 183 | if: needs.release.outputs.released == 'true' 184 | concurrency: release 185 | runs-on: ubuntu-latest 186 | 187 | steps: 188 | - name: Download all the dists 189 | uses: actions/download-artifact@v3 190 | with: 191 | name: python-package-distributions 192 | path: dist/ 193 | 194 | - name: Publish package distributions to PyPI 195 | 196 | uses: pypa/gh-action-pypi-publish@release/v1 197 | -------------------------------------------------------------------------------- /tests/session/test_cities.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from typing import TYPE_CHECKING, Annotated, Iterable, Type 3 | 4 | import pytest 5 | from _pytest.fixtures import FixtureRequest 6 | from 
pydantic.v1 import Field 7 | from pydiction import ANY_NOT_NONE, Matcher 8 | 9 | from pydango.connection.session import PydangoSession 10 | from pydango.indexes import PersistentIndex 11 | from pydango.orm.models import EdgeModel, VertexModel 12 | from pydango.orm.models.base import BaseArangoModel, Relation 13 | from pydango.orm.models.edge import EdgeCollectionConfig 14 | from pydango.orm.models.vertex import VertexCollectionConfig 15 | from pydango.query.consts import ID 16 | from pydango.utils import init_models 17 | 18 | # from tests.utils import find_dict_diffs, ANY_NOT_NONE 19 | # from tests.utils2 import Matcher 20 | 21 | if TYPE_CHECKING: 22 | pass 23 | 24 | 25 | class Visited(EdgeModel): 26 | rating: int 27 | on_date: datetime.date 28 | 29 | class Collection(EdgeCollectionConfig): 30 | name = "visited" 31 | indexes = [ 32 | PersistentIndex(fields=["rating"]), 33 | ] 34 | 35 | 36 | class LivesIn(EdgeModel): 37 | since: datetime.datetime 38 | 39 | class Collection(EdgeCollectionConfig): 40 | name = "lives_in" 41 | 42 | 43 | class Person(VertexModel): 44 | name: str 45 | age: int 46 | lives_in: Annotated["City", Relation[LivesIn]] 47 | visited: Annotated[list["City"], Relation[Visited]] 48 | 49 | class Collection(VertexCollectionConfig): 50 | name = "people" 51 | indexes = [ 52 | PersistentIndex(fields=["name"]), 53 | PersistentIndex(fields=["age"]), 54 | ] 55 | 56 | 57 | class City(VertexModel): 58 | name: str 59 | population: int 60 | 61 | class Collection(VertexCollectionConfig): 62 | name = "cities" 63 | indexes = [PersistentIndex(fields=["name"]), PersistentIndex(fields=["population"])] 64 | 65 | 66 | city = City(name="tlv", population=123) 67 | Person.update_forward_refs() 68 | 69 | 70 | # LivesIn.update_forward_refs() 71 | # Person.update_forward_refs() 72 | def expected_person(person: Person): 73 | expected = { 74 | "_id": ANY_NOT_NONE, 75 | "_key": ANY_NOT_NONE, 76 | "_rev": ANY_NOT_NONE, 77 | "name": person.name, 78 | "age": person.age, 79 | 
"lives_in": { 80 | "_id": ANY_NOT_NONE, 81 | "_key": ANY_NOT_NONE, 82 | "_rev": ANY_NOT_NONE, 83 | "name": "tlv", 84 | "population": person.lives_in.population, 85 | }, 86 | "visited": [ 87 | { 88 | "_id": ANY_NOT_NONE, 89 | "_key": ANY_NOT_NONE, 90 | "_rev": ANY_NOT_NONE, 91 | "name": person.visited[0].name, 92 | "population": person.visited[0].population, 93 | }, 94 | { 95 | "_id": ANY_NOT_NONE, 96 | "_key": ANY_NOT_NONE, 97 | "_rev": ANY_NOT_NONE, 98 | "name": person.visited[1].name, 99 | "population": person.visited[1].population, 100 | }, 101 | ], 102 | "edges": { 103 | "lives_in": { 104 | "_id": ANY_NOT_NONE, 105 | "_key": ANY_NOT_NONE, 106 | "_rev": ANY_NOT_NONE, 107 | "_from": ANY_NOT_NONE, 108 | "_to": ANY_NOT_NONE, 109 | "since": person.edges.lives_in.since, 110 | }, 111 | "visited": [ 112 | { 113 | "_id": ANY_NOT_NONE, 114 | "_key": ANY_NOT_NONE, 115 | "_rev": ANY_NOT_NONE, 116 | "_from": ANY_NOT_NONE, 117 | "_to": ANY_NOT_NONE, 118 | "on_date": person.edges.visited[0].on_date, 119 | "rating": person.edges.visited[0].rating, 120 | }, 121 | { 122 | "_id": ANY_NOT_NONE, 123 | "_key": ANY_NOT_NONE, 124 | "_rev": ANY_NOT_NONE, 125 | "_from": ANY_NOT_NONE, 126 | "_to": ANY_NOT_NONE, 127 | "on_date": person.edges.visited[1].on_date, 128 | "rating": person.edges.visited[1].rating, 129 | }, 130 | ], 131 | }, 132 | } 133 | return expected 134 | 135 | 136 | @pytest.fixture(scope="module", autouse=True) 137 | async def init_collections(session: PydangoSession): 138 | models: Iterable[Type[BaseArangoModel]] = (Person, City, LivesIn, Visited) 139 | await init_models(session, *models) 140 | 141 | 142 | @pytest.fixture 143 | def person(): 144 | p = Person( 145 | name="John", 146 | age=35, 147 | lives_in=city, 148 | visited=[ 149 | City(name="New York", population=123), 150 | City(name="Amsterdam", population=123), 151 | ], 152 | ) 153 | p.edges.lives_in = LivesIn(since=datetime.datetime.now()) 154 | p.edges.visited = [ 155 | Visited(rating=10, 
on_date=datetime.date.today()), 156 | Visited(rating=10, on_date=datetime.date.today()), 157 | ] 158 | 159 | return p 160 | 161 | 162 | @pytest.mark.run(order=1) 163 | @pytest.mark.asyncio 164 | async def test_save(matcher: Matcher, session: PydangoSession, request: FixtureRequest, person): 165 | p = await session.save(person) 166 | request.config.cache.set("person_key", p.key) # type: ignore[union-attr] 167 | matcher.assert_declarative_object(p.dict(by_alias=True, include_edges=True), expected_person(p)) 168 | 169 | 170 | class IdProjection(VertexModel): 171 | id: str = Field(alias=ID) 172 | 173 | 174 | @pytest.mark.run(order=2) 175 | async def test_get(matcher: Matcher, session: PydangoSession, request: FixtureRequest): 176 | _id = request.config.cache.get("person_key", None) # type: ignore[union-attr] 177 | result = await session.get(Person, _id, fetch_edges=True) 178 | assert result is not None 179 | matcher.assert_declarative_object(result.dict(by_alias=True, include_edges=True), expected_person(result)) 180 | -------------------------------------------------------------------------------- /docs/query/operations.md: -------------------------------------------------------------------------------- 1 | # **Operations** 2 | 3 | ## **ForOperation** 4 | 5 | Represents the FOR operation in AQL, used to loop over sets of documents in a collection or the results of a subquery. 6 | The class: 7 | 8 | Takes parameters like `collection` or `variable` and `in` which specify what to loop over. 9 | Contains an optional [`LoopOptions`](options.md#loopoptions-) parameter that can be used to specify loop-related options. 10 | 11 | ## **FilterOperation** 12 | 13 | Represents the FILTER operation in AQL, used to filter the results of a query based on a condition. 14 | 15 | ## **SortOperation** 16 | 17 | Represents the SORT operation in AQL, used to sort the results of a query. 18 | 19 | The class can handle multiple sorts, specified as a list. 
20 | Each item in the list can be: 21 | 22 | - **`FieldExpression`** 23 | - tuple consisting of a field and sort direction. ("field", SortDirection.ASC) 24 | 25 | ## **InsertOperation** 26 | 27 | Represents the **`INSERT`** operation in AQL. 28 | This operation is used to insert a document into a collection. 29 | 30 | It requires a doc parameter (the document to insert) and a collection parameter (the target collection). 31 | The document can be provided as a dictionary, which is then converted to an ObjectExpression. 32 | 33 | ## **RemoveOperation** 34 | 35 | Represents the REMOVE operation in AQL, which is used to remove a document from a collection. 36 | 37 | The class sets: 38 | 39 | - **`expression`**: **`Union[str, dict, LiteralExpression, FieldExpression, VariableExpression, ObjectExpression]`**, 40 | - **`collection`**: **`Union[str, CollectionExpression]`** 41 | - **`options`**: [**`Optional[RemoveOptions]`**](./options.md#removeoptions). 42 | 43 | ## **UpdateOperation** 44 | 45 | Subclasses: 46 | 47 | - **`BaseChangeOperation`** 48 | 49 | Represents the UPDATE operation in AQL. 50 | 51 | The parameters can be provided as dictionaries or **`ObjectExpression`** 52 | The class sets: 53 | 54 | - **`key`**: **`str`** or **`LiteralExpression`** 55 | - **`obj`**: **`dict`** or **`ObjectExpression`** 56 | - **`collection`** : **`str`** or **`CollectionExpression`** 57 | - **`options`**: an optional [**`UpdateOptions`**](./options.md#updateoptions) parameter that can be used to specify update-related options. 58 | 59 | ## **ReplaceOperation** 60 | 61 | Subclasses: 62 | 63 | - **`BaseChangeOperation`** 64 | 65 | The class sets: 66 | 67 | - **`key`**: **`str`** or **`LiteralExpression`** 68 | - **`obj`**: **`dict`** or **`ObjectExpression`** 69 | - **`collection`** : **`str`** or **`CollectionExpression`** 70 | - **`options`**: an optional [**`ReplaceOptions`**](./options.md#replaceoptions) parameter that can be used to specify replace-related options. 
71 | 72 | Represents the REPLACE operation in AQL 73 | The initialization parameters are the same as BaseChangeOperation. 74 | 75 | ## **UpsertOperation** 76 | 77 | Represents the UPSERT operation in AQL. 78 | This operation is used to insert a document if it doesn't exist or update/replace it if it does. 79 | 80 | the class sets: 81 | 82 | - **`filter_`**: the condition to match 83 | - **`collection`**: the target collection 84 | - **`insert`**: the document to insert if no match is found and either 85 | - **`update`**: the data to update if a match is found 86 | - **`replace`**: the document to replace if a match is found 87 | - **`options`**: an optional [**`UpsertOptions`**](./options.md#upsertoptions) parameter that can be used to specify upsert-related options. 88 | 89 | ## **LetOperation** 90 | 91 | Represents the LET operation in AQL, 92 | which allows for the assignment of a value to a variable within a query. 93 | 94 | ## ReturnOperation 95 | 96 | Represents the RETURN operation in AQL. 97 | 98 | Takes a return_expr parameter that specifies what to return. 99 | It can be a: 100 | 101 | - collection 102 | - list 103 | - dictionary 104 | The **`distinct`** parameter allows for returning distinct values. 105 | 106 | ## LimitOperation 107 | 108 | Represents the LIMIT operation in AQL, used to limit the number of results returned by a query. 109 | 110 | Takes parameters like **`limit`** and **`offset`** to specify the number of results and the starting point. 111 | 112 | ## CollectOperation 113 | 114 | Represents the COLLECT operation in AQL, which is used to group/aggregate results. 115 | 116 | The class is initialized with various parameters, including: 117 | 118 | - **`collect`**: Specifies the criteria for grouping results. 119 | - **`aggregate`**: Specifies aggregate calculations to be performed on grouped results. 120 | - **`into`**: Specifies the variable into which the grouped results are collected. 
121 | - **`keep`**: Specifies which variables to keep after the COLLECT operation. 122 | - **`with_count_into`**: Specifies a variable that will store the number of grouped results. 123 | - **`options`**: an optional [**`CollectOptions`**](./options.md#collectoptions) parameter that can be used to specify collect-related options. 124 | 125 | The compile method translates the COLLECT operation into its AQL representation, 126 | incorporating the grouping criteria, aggregation calculations, and other parameters. 127 | 128 | ## **TraversalOperation** 129 | 130 | Represents the graph traversal operation in AQL. 131 | 132 | ### TraversalDirection 133 | 134 | An enumeration representing the traversal direction options in AQL graph queries. The options include: 135 | 136 | - **`OUTBOUND`** 137 | - **`INBOUND`** 138 | - **`ANY`** 139 | 140 | ## Abstract Operations 141 | 142 | ### Operation 143 | 144 | This is an abstract base class representing a generic AQL operation. The class: 145 | 146 | Contains a **`query_ref`** attribute which refers to the broader query that the operation is a part of. 147 | Provides an abstract compile method that subclasses need to implement to translate the operation 148 | into its AQL representation. 149 | 150 | ### BaseChangeOperation 151 | 152 | This is an abstract base class that provides common functionality for operations that change data in collections (e.g., UPDATE, REPLACE). 153 | 154 | ## Not Implemented Yet 155 | 156 | ### WindowOperation 157 | 158 | Represents the WINDOW operation in AQL, which is used for windowed calculations on results. 159 | 160 | ### WithOperation 161 | 162 | Represents the WITH operation in AQL. 
import dataclasses
from collections import defaultdict
from enum import Enum
from pathlib import PurePath
from types import GeneratorType
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union

from pydantic.v1 import BaseModel
from pydantic.v1.json import ENCODERS_BY_TYPE

SetIntStr = Set[Union[int, str]]
DictIntStrAny = Dict[Union[int, str], Any]


def generate_encoders_by_class_tuples(
    type_encoder_map: Dict[Any, Callable[[Any], Any]]
) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]:
    """Invert a ``type -> encoder`` map into ``encoder -> (types, ...)``.

    Grouping all types that share an encoder lets :func:`jsonable_encoder`
    fall back to a single ``isinstance`` check per encoder instead of one
    lookup per type.
    """
    encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple)
    for type_, encoder in type_encoder_map.items():
        encoders_by_class_tuples[encoder] += (type_,)
    return encoders_by_class_tuples


# Precomputed once at import time from pydantic's default encoder table.
encoders_by_class_tuples = generate_encoders_by_class_tuples(ENCODERS_BY_TYPE)


def jsonable_encoder(
    obj: Any,
    include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    by_alias: bool = True,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None,
    sqlalchemy_safe: bool = True,
) -> Any:
    """Recursively convert *obj* into JSON-serializable primitives.

    Handles pydantic models, dataclasses, enums, paths, mappings, iterables
    and any type registered in pydantic's ``ENCODERS_BY_TYPE``; as a last
    resort it tries ``dict(obj)`` then ``vars(obj)``.

    Args:
        obj: The object to encode.
        include / exclude: Field selections forwarded to ``.dict()`` for
            models and applied to plain dict keys.
        by_alias: Use field aliases when serializing models.
        exclude_unset / exclude_defaults / exclude_none: Forwarded to
            pydantic's ``.dict()`` and honored for plain dicts.
        custom_encoder: Extra ``type -> callable`` encoders that take
            precedence over the defaults.
        sqlalchemy_safe: Skip dict keys starting with ``"_sa"``.

    Raises:
        ValueError: If the object cannot be converted by any strategy.
    """
    custom_encoder = custom_encoder or {}
    if custom_encoder:
        if type(obj) in custom_encoder:
            return custom_encoder[type(obj)](obj)
        else:
            for encoder_type, encoder_instance in custom_encoder.items():
                if isinstance(obj, encoder_type):
                    return encoder_instance(obj)
    if include is not None and not isinstance(include, (set, dict)):
        include = set(include)
    if exclude is not None and not isinstance(exclude, (set, dict)):
        exclude = set(exclude)
    if isinstance(obj, BaseModel):
        # BUG FIX: copy before merging. Calling ``.update`` on the dict
        # returned by ``getattr`` mutated the model class's ``json_encoders``
        # config in place, leaking caller-supplied encoders into every
        # subsequent call for that model class.
        encoder = dict(getattr(obj.__config__, "json_encoders", {}))
        if custom_encoder:
            encoder.update(custom_encoder)
        obj_dict = obj.dict(
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_none=exclude_none,
            exclude_defaults=exclude_defaults,
        )
        if "__root__" in obj_dict:
            obj_dict = obj_dict["__root__"]
        # include/exclude were already applied by ``.dict()`` above, so they
        # are intentionally not forwarded into the recursive call.
        return jsonable_encoder(
            obj_dict,
            exclude_none=exclude_none,
            exclude_defaults=exclude_defaults,
            custom_encoder=encoder,
            sqlalchemy_safe=sqlalchemy_safe,
        )
    if dataclasses.is_dataclass(obj):
        obj_dict = dataclasses.asdict(obj)
        return jsonable_encoder(
            obj_dict,
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            custom_encoder=custom_encoder,
            sqlalchemy_safe=sqlalchemy_safe,
        )
    if isinstance(obj, Enum):
        return obj.value
    if isinstance(obj, PurePath):
        return str(obj)
    if isinstance(obj, (str, int, float, type(None))):
        return obj
    if isinstance(obj, dict):
        encoded_dict = {}
        allowed_keys = set(obj.keys())
        if include is not None:
            allowed_keys &= set(include)
        if exclude is not None:
            allowed_keys -= set(exclude)
        for key, value in obj.items():
            if (
                (not sqlalchemy_safe or (not isinstance(key, str)) or (not key.startswith("_sa")))
                and (value is not None or not exclude_none)
                and key in allowed_keys
            ):
                encoded_key = jsonable_encoder(
                    key,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_none=exclude_none,
                    custom_encoder=custom_encoder,
                    sqlalchemy_safe=sqlalchemy_safe,
                )
                encoded_value = jsonable_encoder(
                    value,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_none=exclude_none,
                    custom_encoder=custom_encoder,
                    sqlalchemy_safe=sqlalchemy_safe,
                )
                encoded_dict[encoded_key] = encoded_value
        return encoded_dict
    if isinstance(obj, (list, set, frozenset, GeneratorType, tuple)):
        encoded_list = []
        for item in obj:
            encoded_list.append(
                jsonable_encoder(
                    item,
                    include=include,
                    exclude=exclude,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_defaults=exclude_defaults,
                    exclude_none=exclude_none,
                    custom_encoder=custom_encoder,
                    sqlalchemy_safe=sqlalchemy_safe,
                )
            )
        return encoded_list

    # Exact-type lookup first (fast path), then isinstance against the
    # precomputed encoder -> types table.
    if type(obj) in ENCODERS_BY_TYPE:
        return ENCODERS_BY_TYPE[type(obj)](obj)
    for encoder, classes_tuple in encoders_by_class_tuples.items():
        if isinstance(obj, classes_tuple):
            return encoder(obj)

    # Last-resort strategies: mapping protocol, then the instance __dict__.
    try:
        data = dict(obj)
    except Exception as e:
        errors: List[Exception] = [e]
        try:
            data = vars(obj)
        except Exception as e:
            errors.append(e)
            raise ValueError(errors) from e
    return jsonable_encoder(
        data,
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        custom_encoder=custom_encoder,
        sqlalchemy_safe=sqlalchemy_safe,
    )
def _make_upsert_query(
    filter_: Any,
    i: Any,
    model: Union[Type[BaseArangoModel], BaseArangoModel],
    query: "AQLQuery",
    strategy: UpdateStrategy,
    options: Union[UpsertOptions, None] = None,
) -> "AQLQuery":
    """Append an UPSERT over ``model``'s collection to ``query``.

    Args:
        filter_: the UPSERT search document (unique-index fields or ``_key``).
        i: the insert document, also used as the update/replace document on match.
        model: model class or instance; only ``Collection.name`` is read.
        query: the query being built; the UPSERT clause is appended to it.
        strategy: whether a matched document is UPDATEd or REPLACEd.
        options: optional AQL UPSERT options, passed through unchanged.

    Returns:
        The query with the UPSERT appended.

    Raises:
        ValueError: if ``strategy`` is not a known ``UpdateStrategy`` member.
    """
    # Compare against the enum class, not `strategy.UPDATE`: the original
    # instance-attribute access resolves to the same member but reads as a
    # typo and hides the intent.
    if strategy == UpdateStrategy.UPDATE:
        query = query.upsert(filter_, i, model.Collection.name, update=i, options=options)
    elif strategy == UpdateStrategy.REPLACE:
        query = query.upsert(filter_, i, model.Collection.name, replace=i, options=options)
    else:
        # Previously an unknown strategy silently returned the query without an
        # UPSERT clause, hiding caller bugs; fail loudly instead.
        raise ValueError(f"unknown update strategy: {strategy!r}")

    return query
def _build_upsert_query(
    i: IteratorExpression,
    strategy: UpdateStrategy,
    model: Type["BaseArangoModel"],
    docs: Union[VariableExpression, list[VariableExpression]],
    *,
    edge: bool = False,
):
    # Upsert every document in `docs` into `model`'s collection and return the
    # resulting documents (AQL NEW) so callers can capture generated ids.
    filter_ = _get_upsert_filter(i, model)
    query = for_(i, in_=docs)
    query = _make_upsert_query(filter_, i, model, query, strategy, None).return_(new(edge=edge))
    return query


def _build_vertex_query(v, vertices_docs, strategy: UpdateStrategy):
    # One LET per vertex collection: upsert all of its documents and expose the
    # upserted rows under a variable named after the collection.
    i = IteratorExpression()
    from_var = VariableExpression(v.Collection.name)
    query = _build_upsert_query(i, strategy, v, vertices_docs)
    return from_var, query


def _build_graph_query(
    document: "VertexModel",
    strategy: UpdateStrategy = UpdateStrategy.UPDATE,
    collection_options: Union[CollectionUpsertOptions, None] = None,
) -> tuple[ModelFieldMapping, VerticesIdsMapping, EdgesIdsMapping, ORMQuery]:
    """Compile a single AQL query that upserts a whole vertex/edge graph.

    Walks ``document``'s object graph via ``_build_graph``, emits one
    LET + UPSERT per vertex collection, then assembles the edge documents
    (resolving ``_from``/``_to`` against the upserted vertices) and upserts
    those as well. Returns the model-field mapping, per-collection mappings
    from python ``id()`` of each document to its index in the query result,
    and the final query returning ``{"vertex": ..., "edges": ...}``.
    """
    # NOTE(review): `collection_options` is accepted but never used in this
    # body — confirm whether it should be forwarded to the upserts.
    query = ORMQuery()
    _visited: set[int] = set()
    edge_collections, edge_vertex_index, vertex_collections, model_fields_mapping = _build_graph(document, _visited)
    vertex_let_queries: dict[Type["VertexModel"], VariableExpression] = {}
    vertices_ids: VerticesIdsMapping = {}
    edge_ids: EdgesIdsMapping = {}
    # Phase 1: one LET per vertex collection, remembering each document's
    # position so ids can be read back from the result later.
    for v in vertex_collections:
        vertex_docs = list(vertex_collections[v].values())
        vertices_ids[v] = {id(doc): i for i, doc in enumerate(vertex_docs)}
        from_var, vertex_query = _build_vertex_query(v, vertex_docs, strategy)
        vertex_let_queries[v] = from_var

        query.let(from_var, vertex_query)

    edge_let_queries = {}

    # Phase 2: per edge collection, build edge documents whose _from/_to point
    # at the vertices upserted above, then upsert the merged edges.
    for e, coll in edge_vertex_index.items():
        counter = 0
        edge_vars = []
        for j, (instance, mapping) in enumerate(list(coll.items())):
            iterator = IteratorExpression()
            # Record each edge document's offset in this collection's result.
            edge_ids.setdefault(e, {}).setdefault(instance, {}).update(
                {id(doc): i + counter for i, doc in enumerate(edge_collections[e][instance])}
            )
            edge_var_name = f"{e.Collection.name}_{j + 1}"
            edge = VariableExpression(edge_var_name)
            query.let(edge, edge_collections[e][instance])
            from_model: Type["VertexModel"]
            to_model: Type["VertexModel"]
            for k, ((from_model, to_model), rels) in enumerate(mapping.items()):
                # NOTE(review): builtin dict_keys has no .index(); this relies
                # on _build_graph returning an indexed/ordered mapping type —
                # confirm against pydango.connection.graph_utils.
                from_ = vertex_collections[from_model].keys().index(instance)
                new_rels = [vertex_collections[to_model].keys().index(x) for x in rels]
                from_var = vertex_let_queries[from_model]
                to_var = vertex_let_queries[to_model]
                ret = {FROM: from_var[from_]._id, TO: to_var[iterator]._id}

                # LET <edge>_<k>_from_to = FOR it IN new_rels RETURN {_from, _to}
                edge_from_to = VariableExpression(edge_var_name + f"_{k}_from_to")
                query.let(edge_from_to, for_(iterator, new_rels).return_(ret))

                merger = IteratorExpression("merger")

                # Merge the user-supplied edge payloads with the _from/_to pairs.
                merged = VariableExpression(edge_var_name + f"_{k}_merged")
                query.let(
                    merged,
                    for_(merger, RangeExpression(0, Length(edge_from_to) - 1)).return_(
                        Merge(edge[merger], edge_from_to[merger])
                    ),
                )
                edge_vars.append(merged)
                # Advance the result offset past this relation's edges.
                # NOTE(review): the dump's indentation is ambiguous here —
                # confirm this increment belongs inside the relation loop.
                counter += len(rels)

        edges: Union[VariableExpression, list[VariableExpression]]
        if len(edge_vars) > 1:
            edges = cast(list[VariableExpression], UnionArrays(*edge_vars))
        elif len(edge_vars) == 1:
            edges = edge_vars[0]
        else:
            # No edges for this collection — nothing to upsert.
            continue

        edge_iter = IteratorExpression()
        edge_let_queries[e] = VariableExpression(edge_var_name + "_result")
        query.let(edge_let_queries[e], _build_upsert_query(edge_iter, strategy, e, edges, edge=True))

    return (
        model_fields_mapping,
        vertices_ids,
        edge_ids,
        query.return_(
            {
                "vertex": {k.Collection.name: v for k, v in vertex_let_queries.items()},
                "edges": {k.Collection.name: v for k, v in edge_let_queries.items()},
            }
        ),
    )
def get_user_orders_query(user_id):
    """Build an AQL query returning a user's orders, oldest first.

    Returns the query together with the orders collection expression so the
    caller can keep referencing it.
    """
    users_coll = CollectionExpression("users", "u")
    orders_coll = CollectionExpression("orders", "o")
    aql_query = (
        AQLQuery()
        .for_(users_coll)
        .filter(users_coll._key == user_id)
        .for_(orders_coll)
        .filter(users_coll._key == orders_coll.user)
        .sort(+orders_coll.order_date)  # unary + == ascending sort
        .return_(orders_coll)
    )
    return aql_query, orders_coll


def get_product_reviews_query(product_id):
    """Build an AQL query returning a product's reviews, highest rating first."""
    products_coll = CollectionExpression("products", "p")
    reviews_coll = CollectionExpression("reviews", "r")
    aql_query = (
        AQLQuery()
        .for_(products_coll)
        .filter(products_coll._key == product_id)
        .for_(reviews_coll)
        .filter(products_coll._key == reviews_coll.product)
        .sort(-reviews_coll.rating)  # unary - == descending sort
        .return_(reviews_coll)
    )
    return aql_query


def get_user_reviews_query(user_id):
    """Build an AQL query returning a user's reviews, highest rating first.

    Returns the query together with the reviews collection expression.
    """
    users_coll = CollectionExpression("users", "u")
    reviews_coll = CollectionExpression("reviews", "r")
    aql_query = (
        AQLQuery()
        .for_(users_coll)
        .filter(users_coll._key == user_id)
        .for_(reviews_coll)
        .filter(users_coll._key == reviews_coll.user)
        .sort(-reviews_coll.rating)
        .return_(reviews_coll)
    )
    return aql_query, reviews_coll
def get_product_orders_reviews_query(product_id):
    """Build an AQL query joining one product with its orders and reviews,
    sorted by order date ascending, then rating descending."""
    products = CollectionExpression("products", "p")
    orders = CollectionExpression("orders", "o")
    reviews = CollectionExpression("reviews", "r")

    query = AQLQuery()
    query = query.for_(products).filter(products._key == product_id)
    query = query.for_(orders).filter(In(products._key, orders.products))
    query = query.for_(reviews).filter(products._key == reviews.product)
    query = query.sort(+orders.order_date, -reviews.rating)
    projection = ObjectExpression(
        {"product": products.iterator, "orders": orders.iterator, "reviews": reviews.iterator}
    )
    return query.return_(projection)
def get_ordered_products_with_reviews_query():
    """Build an AQL query listing ordered products joined with their reviews,
    sorted by order date ascending."""
    products = CollectionExpression("products", "p")
    orders = CollectionExpression("orders", "o")
    reviews = CollectionExpression("reviews", "r")

    query = AQLQuery().for_(products).for_(orders).for_(reviews)
    query = query.filter(In(products._key, orders.products))
    query = query.filter(products._key == reviews.product)
    query = query.sort(+orders.order_date)
    projection = ObjectExpression(
        {"product": products.iterator, "orders": orders.iterator, "reviews": reviews.iterator}
    )
    return query.return_(projection)
.sort(-AQLQuery.max(orders_coll.order_date)) 178 | # .limit(1) 179 | # .return_(users_coll) 180 | # ) 181 | # return aql_query, users_coll 182 | 183 | 184 | # def get_users_with_common_ordered_products_query(user_id): 185 | # users_coll = CollectionExpression("users", "u") 186 | # orders_coll = CollectionExpression("orders", "o") 187 | # products_coll = CollectionExpression("products", "p") 188 | # aql_query = ( 189 | # AQLQuery() 190 | # .for_(users_coll) 191 | # .for_(orders_coll) 192 | # .for_(products_coll) 193 | # .filter(users_coll._key == user_id) 194 | # .filter(users_coll._key == orders_coll.user) 195 | # .filter(orders_coll.product == products_coll._key) 196 | # .group_by(users_coll._key) 197 | # .sort(-AQLQuery.count(products_coll._key)) 198 | # .limit(1) 199 | # .return_(users_coll) 200 | # ) 201 | # return aql_query, users_coll 202 | -------------------------------------------------------------------------------- /docs/query/expressions.md: -------------------------------------------------------------------------------- 1 | # **Expressions** 2 | 3 | ## **Basic Expressions** 4 | 5 | ### **`CollectionExpression`** 6 | 7 | Subclasses: 8 | 9 | - **`IterableExpression`** 10 | 11 | Represents ArangoDB collections in AQL queries. It takes a collection name and an optional iterator. 12 | It provides methods for accessing fields within the collection and compiles the collection name into its AQL 13 | representation. 14 | 15 | ### **`LiteralExpression`** 16 | 17 | Represents literal expressions in `AQL`. Inherits from BindableExpression and have a 18 | representation as **`?`**, which is likely a placeholder for a value to be bound later. 19 | 20 | ### **`FieldExpression`** 21 | 22 | Represents field accesses in `AQL` queries. This class handles accessing fields or attributes of 23 | documents or objects within queries. It provides functionalities like: 24 | 25 | Accessing nested fields. 26 | Generating proper `AQL` syntax for field access. 
27 | Overloaded operators to produce conditional expressions. This allows users to write Pythonic 28 | expressions for arithmetic operations (**`+`**, **`-`**, **`*`**, **`/`**, **`%`**) and 29 | comparisons (**`==`**, **`>`**, **`<`**, **`>=`**, **`<=`**), 30 | which get translated into corresponding `AQL` expressions. 31 | 32 | ### **`VariableExpression`** 33 | 34 | Represents variable expressions in `AQL`. This class allows for dynamic variable names and access to 35 | fields within the variable. 36 | 37 | ### **`IteratorExpression`** 38 | 39 | This class inherits from **VariableExpression** and represent an iterator in AQL queries. 40 | Iterators are used in FOR loops in AQL to iterate over a set of values or documents. 41 | 42 | ## **FigurativeExpression** 43 | 44 | Subclasses: 45 | 46 | - **`BindableExpression`** 47 | - **`ReturnableExpression`** 48 | 49 | Abstract class for non `LiteralExpression` 50 | 51 | ### **`ListExpression`** 52 | 53 | Subclasses: 54 | 55 | - **`BindableExpression`** 56 | - **`ReturnableExpression`** 57 | 58 | Represents lists in AQL. This class can take a variety of item types, including: 59 | 60 | - query expressions 61 | - literals 62 | - mappings 63 | - sequences 64 | - basic data types like int, float, str, and bool. 65 | 66 | Handles nested structures, converting nested lists into appropriate AQL representations. 67 | 68 | ### **`ObjectExpression`** 69 | 70 | Represents objects (like dictionaries or AQL documents) in queries. 71 | This class Can take a variety of key-value pairs, including: 72 | 73 | - query expressions 74 | - literals 75 | - mappings 76 | - sequences 77 | - basic data types like int, float, str, and bool. 78 | 79 | Handles nested structures, converting nested dictionaries and lists into appropriate AQL representations. 80 | Possesses a \_bind attribute for binding values to the object. 
81 | 82 | ## **Iterable Expressions** 83 | 84 | ### **`RangeExpression`** 85 | 86 | Represents a range in AQL queries, such as specifying a range of numbers. It support both 87 | literal values and other expressions for the start and end of the range. 88 | 89 | ### **`AssignmentExpression`** 90 | 91 | Represents an assignment operation in AQL, like setting a variable's value. 92 | 93 | ## **Binary Expressions** 94 | 95 | ### **`BinaryLogicalExpression`** 96 | 97 | A subclass of ConditionExpression that represents binary logical operations in AQL. The comment 98 | suggests it supports operations like **`&&`** (AND) and **`||`** (OR). 99 | 100 | ### **`BinaryArithmeticExpression`** 101 | 102 | ## **Unary Logical Expressions** 103 | 104 | This class might represent unary logical operations, though specific operations aren't immediately clear from this 105 | snippet. **`NOT`** 106 | 107 | ### **`NotExpression`** 108 | 109 | represents the **`NOT`** operation in AQL. 110 | 111 | ## **BinaryLogicalExpression** 112 | 113 | ### **`AndExpression`** 114 | 115 | Subclasses of BinaryLogicalExpression that represent the logical AND (&&) 116 | operations, respectively. 117 | 118 | ### **`OrExpression`** 119 | 120 | represent the logical OR (||) operations. 121 | 122 | ### **`ConditionExpression`** 123 | 124 | Inherits from BinaryExpression and LogicalExpression. This class represent conditional 125 | operations in AQL, like comparisons (e.g., `>`, `>=`, `==`, `!=`, `<`, `<=`). 126 | It also supports chaining of conditions using logical operators `AND` and `OR`. 127 | 128 | ### **`In`** 129 | 130 | A subclass of ConditionExpression that represent the **`IN`** operation in AQL, where an object is checked if 131 | it's part of an iterable. 132 | 133 | ## **Arithmetic Expressions** 134 | 135 | ### **`UnaryArithmeticExpression`** 136 | 137 | Inherits from both UnaryExpression and BaseArithmeticExpression. This class represents unary 138 | arithmetic operations in AQL. 
**`-1`** 139 | 140 | ### **`ArithmeticExpression`** 141 | 142 | Inherits from both BinaryExpression and BaseArithmeticExpression. Represents binary arithmetic 143 | operations in AQL. It also has overloaded comparison operators to form conditions from arithmetic results. 144 | 145 | ## **Query Expressions** 146 | 147 | ### **`QueryExpression`** 148 | 149 | An abstract base class that represents an AQL Query. 150 | 151 | ### **`SubQueryExpression`** 152 | 153 | Represents subqueries in AQL. Subqueries are queries embedded within other queries. This class wraps 154 | around another QueryExpression to represent the subquery. The compile method seems to format the subquery for inclusion 155 | in the main query. 156 | 157 | ## **Query Result Expressions** 158 | 159 | ### **`ScalarSubQuery`** 160 | 161 | Inherits from SubQueryExpression. The details aren't fully visible, but this might represent a subquery 162 | that returns a scalar value. 163 | 164 | ### **`VectorSubQueryExpression`** 165 | 166 | Inherits from both SubQueryExpression and IterableExpression. This class likely represents 167 | subqueries that return a list or array of results. 168 | 169 | ## **Modification Variables** 170 | 171 | ### **`NEW`** 172 | 173 | A subclass of ModificationVariable representing the "NEW" keyword in `AQL`, which might be used to refer to the new 174 | version of a document after an update or replace operation. 175 | 176 | ### **`OLD`** 177 | 178 | A subclass of ModificationVariable represents the "OLD" keyword in `AQL`, likely referring to the 179 | previous version of a document before an update or replace operation. 180 | 181 | ## **Sort Expressions** 182 | 183 | ### **`SortExpression`** 184 | 185 | Represents a `SORT` expression in `AQL`. This class handles sorting of query results. 186 | 187 | #### **SortDirection** 188 | 189 | An enumeration defining sorting directions - ASC for ascending and DESC for descending. 
## **Abstract Expressions**

### **`Expression`**

An abstract base class for all types of expressions. It mandates an abstract method, `compile`, which is
essential for turning the Pythonic expression into `AQL` syntax.

### **`BindableExpression`**

A subclass of Expression that represents expressions that can be bound to specific values.

### **`ReturnableExpression`**

An abstract base class representing returnable expressions. This is a base for expressions that
can be part of the RETURN statement in `AQL`.

### **`IterableExpression`**

An abstract base class that represents iterable expressions in AQL.

### **`ModificationVariable`**

Subclasses:
**`VariableExpression`**

Represents variables used in modification queries (like `UPDATE`, `INSERT`, `UPSERT`, `REPLACE`).

### **`ReturnableIterableExpression`**

Subclasses:

- **`IterableExpression`**
- **`ReturnableExpression`**

This class marks that some iterable expressions can be returned in AQL queries.

### **`UnaryExpression`**

Represents unary operations in AQL queries, like NOT or negation.

### **`BinaryExpression`**

Represents binary operations in AQL queries, such as arithmetic operations (like addition or
multiplication) or logical operations (like `AND` or `OR`).

### **`LogicalExpression`**

An abstract base class representing logical expressions in AQL queries. This serves as a foundational
class for both unary and binary logical operations.
class VertexMeta(ArangoModelMeta):
    """Metaclass for ``VertexModel`` subclasses.

    For every concrete vertex model it collects the declared relationship
    fields, generates a pydantic ``<Name>Edges`` model describing them,
    injects the ``edges`` field/annotation into the class namespace, and
    builds ``__edge_to_field_mapping__`` (edge collection -> model fields).
    """

    def __new__(mcs, name: str, bases: tuple[Type], namespace: dict, **kwargs: Any):
        parents = [b for b in bases if isinstance(b, mcs)]
        if not parents:
            # Root classes (e.g. VertexModel itself) need no edge machinery.
            return super().__new__(mcs, name, bases, namespace, **kwargs)
        _relationships, original_annotations = mcs.get_relations_from_namespace(namespace)
        __edge_to_field_mapping__, edge_annotation = mcs.build_edges_model(_relationships, bases, name, namespace)

        namespace["__edge_to_field_mapping__"] = __edge_to_field_mapping__
        namespace["__annotations__"][EDGES] = edge_annotation

        return super().__new__(mcs, name, bases, namespace, **kwargs)

    @staticmethod
    def build_edges_model(
        _relationships: Relationships, bases: tuple[Type[Any]], name: str, namespace: dict[str, Any]
    ) -> tuple[EdgeFieldMapping, ModelField]:
        """Build the ``edges`` annotation and edge->field mapping for a model.

        Direct ``VertexModel`` subclasses get a generated ``<Name>Edges``
        model; other classes only get the (excluded) ``edges`` field with no
        concrete annotation.
        """
        if VertexModel in bases:
            edges_model = VertexMeta._build_model(_relationships, name)
            namespace[EDGES] = Field(None, exclude=True)
            edge_annotation = cast(Any, Optional[edges_model])
        else:
            namespace[EDGES] = Field(None, exclude=True)
            edge_annotation = cast(Any, None)

        __edge_to_field_mapping__ = VertexMeta._build_edge_to_field_mapping(_relationships)

        VertexMeta._validate_edges(__edge_to_field_mapping__, namespace)
        return __edge_to_field_mapping__, edge_annotation

    @staticmethod
    def _build_edge_to_field_mapping(relationships: Relationships) -> EdgeFieldMapping:
        """Map each via-model (collection name or ForwardRef) to the
        relationship fields that traverse it."""
        __edge_to_field_mapping__: EdgeFieldMapping = {}
        for relation_field, relation_info in relationships.items():
            if not relation_info.via_model:
                continue
            if isinstance(relation_info.via_model, ForwardRef):
                # Unresolved targets stay keyed by the ForwardRef; they are
                # re-keyed in VertexModel.update_forward_refs.
                __edge_to_field_mapping__.setdefault(relation_info.via_model, []).append(cast(str, relation_field))
            elif issubclass(relation_info.via_model, BaseArangoModel):
                __edge_to_field_mapping__.setdefault(relation_info.via_model.Collection.name, []).append(relation_field)
        return __edge_to_field_mapping__

    @staticmethod
    def _validate_edges(edge_to_field_mapping: EdgeFieldMapping, namespace: dict[str, Any]) -> None:
        """Validate that ambiguous edge collections can be disambiguated.

        When several relationship fields share one edge collection, the
        model's ``Collection`` class must expose a callable named after each
        field; every missing one is collected and reported in a single error.

        Raises:
            ValueError: if a distinction attribute exists but is not callable.
            AttributeError: listing every missing distinction callable.
        """
        errors: dict[Union[str, ForwardRef], list[str]] = {}
        items = edge_to_field_mapping.items()
        for coll_or_forward_ref, fields in items:
            if len(fields) > 1:
                for i, f in enumerate(fields):
                    # default=None: a missing attribute (or a missing
                    # Collection class entirely) must fall through to the
                    # aggregated `errors` report below; the previous bare
                    # getattr raised an unhelpful AttributeError before the
                    # friendly message could be produced.
                    func = getattr(namespace.get("Collection"), f, None)
                    if func:
                        if not callable(func):
                            raise ValueError(f"{func} is not callable")
                        fields[i] = func

                    else:
                        errors.setdefault(coll_or_forward_ref, []).append(f)
        if errors:
            raise AttributeError(f"you must define the following Collection functions for distinction {dict(errors)}")

    @staticmethod
    def _build_model(relationships: Relationships, name: str):
        """Create the pydantic ``<name>Edges`` model from the relationships."""
        __edge_namespace__: dict[str, Any] = {}
        for field, relation_info in relationships.items():
            via_model = relation_info.via_model
            if relation_info.link_type in LIST_TYPES:
                # Optional list links default to None; required ones use `...`.
                if relation_info.link_type in (LinkTypes.OPTIONAL_EDGE_LIST, LinkTypes.OPTIONAL_LIST):
                    __edge_namespace__[field] = (Optional[list[via_model]], None)  # type: ignore[valid-type]
                else:
                    __edge_namespace__[field] = (list[via_model], ...)  # type: ignore[valid-type]

            elif relation_info.link_type in (LinkTypes.OPTIONAL_EDGE, LinkTypes.OPTIONAL_DIRECT):
                __edge_namespace__[field] = (Optional[via_model], None)
            else:
                __edge_namespace__[field] = (via_model, ...)  # type: ignore[assignment]
        m = create_model(f"{name}Edges", **__edge_namespace__, __base__=EdgeData)
        return m
class VertexModel(BaseArangoModel, Generic[TEdges], metaclass=VertexMeta):
    """Base class for graph vertex (node) documents.

    ``VertexMeta`` synthesizes the ``edges`` field from the relationship
    fields declared on subclasses; ``TEdges`` is the generated edges model.
    """

    if TYPE_CHECKING:
        edges: TEdges
        __edge_to_field_mapping__: dict[Union[str, ForwardRef], list[str]] = {}

    class Collection(VertexCollectionConfig): ...

    def __init__(self, **data: Any):
        # Normalize edge payloads before pydantic validation runs.
        if EDGES in data:
            convert_edge_data_to_valid_kwargs(data[EDGES])

        super().__init__(**data)

        if EDGES not in data:  # note: enables dot notation for edges field
            object.__setattr__(self, EDGES, EdgeDict())

    def dict(
        self,
        *,
        include: Optional[Union["AbstractSetIntStr", "MappingIntStrAny"]] = None,
        exclude: Optional[Union["AbstractSetIntStr", "MappingIntStrAny"]] = None,
        by_alias: bool = False,
        skip_defaults: Optional[bool] = None,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        include_edges: bool = False,
    ) -> "DictStrAny":
        """Serialize the model to a dict, optionally including its edges.

        ``include_edges=True`` removes ``edges`` from the pydantic exclude set
        so it is serialized. Raises ``AssertionError`` when the graph is
        recursive and serialization cannot terminate.
        """
        # NOTE(review): this pops from the shared class-level
        # __exclude_fields__ mapping, so the removal persists for later
        # .dict() calls on any instance — confirm this is intended.
        if include_edges and self.__exclude_fields__:
            cast(dict, self.__exclude_fields__).pop("edges")

        try:
            super__dict = super().dict(
                include=include,
                exclude=exclude,
                by_alias=by_alias,
                skip_defaults=skip_defaults,
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
                exclude_none=exclude_none,
            )
        except RecursionError as e:
            # A cyclic graph makes pydantic recurse forever; surface a clear error.
            raise AssertionError(
                "is not possible to call .dict() when using recursive model, instead traverse the graph and collect"
                " data or exclude recursive fields"
            ) from e

        # Normalize an empty EdgeDict to None in the output.
        # NOTE(review): `or` binds looser than `and`, so when
        # __exclude_fields__ is None the whole condition is True regardless of
        # the edge checks — confirm the intended grouping.
        if (
            self.__exclude_fields__ is None
            or EDGES not in self.__exclude_fields__
            and isinstance(self.edges, EdgeDict)
            and self.edges.__class__ == EdgeDict
            and not self.edges
        ):
            super__dict[EDGES] = None

        return super__dict

    def save_dict(self) -> "DictStrAny":
        """Serialize for persistence: aliases applied, relationship fields excluded."""
        return jsonable_encoder(self, by_alias=True, exclude=cast(set, self.__relationships_fields__.keys()))

    @classmethod
    def update_forward_refs(cls, **localns: Any) -> None:
        """Resolve ForwardRef relationship/edge targets once all models exist."""
        super().update_forward_refs(**localns)

        # Re-key the edge mapping: resolved ForwardRefs become collection names.
        for k in cls.__edge_to_field_mapping__.copy():
            if isinstance(k, ForwardRef):
                funcs = cls.__edge_to_field_mapping__.pop(k)
                new_k = evaluate_forward_ref(cls, k, **localns)
                # NOTE(review): membership is tested with `new_k` (the model
                # class) while both branches key on `new_k.Collection.name` —
                # confirm the `in` check uses the intended key.
                if new_k in cls.__edge_to_field_mapping__:
                    cls.__edge_to_field_mapping__[new_k.Collection.name].extend(funcs)
                else:
                    cls.__edge_to_field_mapping__[new_k.Collection.name] = funcs

        globalns = get_globals(cls)

        # Resolve ForwardRef annotations inside the generated edges model.
        for fields, model_field in cls.__fields__[EDGES].type_.__fields__.items():
            if isinstance(model_field.type_, ForwardRef):
                model_field.type_ = evaluate_forwardref(model_field.type_, globalns, localns)

        cls.__fields__[EDGES].type_.update_forward_refs(**localns, **globalns)
def _collection_from_model(database: StandardDatabase, model: Type[BaseArangoModel]) -> StandardCollection:
    """Resolve the ArangoDB collection backing ``model`` in ``database``."""
    collection_name = model.Collection.name
    return database.collection(collection_name)
83 | 84 | def __init__( 85 | self, 86 | *, 87 | client: Optional[ArangoClient] = None, 88 | database: Union[StandardDatabase, str], 89 | username: str = "root", 90 | password: str = "", 91 | auth_method: str = "basic", 92 | ): 93 | if isinstance(database, str): 94 | if client is None: 95 | raise ValueError("client is required when database is a string") 96 | self._db_name = database 97 | self.database = None 98 | self.password = password 99 | self.username = username 100 | self.client = client 101 | self.auth_method = auth_method 102 | elif isinstance(database, StandardDatabase): 103 | self.database = database 104 | else: 105 | raise ValueError("database should be a string or a StandardDatabase instance") 106 | 107 | async def initialize(self): 108 | if self.database is None: 109 | self.database = await get_or_create_db( 110 | self.client, self._db_name, user=self.username, password=self.password 111 | ) 112 | 113 | @property 114 | def initialized(self): 115 | return isinstance(self.database, StandardDatabase) 116 | 117 | @staticmethod 118 | async def create_indexes(collection: StandardCollection, model: Type["ArangoModel"]) -> Sequence[Result[Json]]: 119 | if model.Collection.indexes: 120 | logger.debug("creating indexes", extra=dict(indexes=model.Collection.indexes, model=model)) 121 | index_requests = [] 122 | for i in model.Collection.indexes or []: 123 | if isinstance(i, dict): 124 | index_requests.append(_INDEX_MAPPING[i["type"]](collection, **i)) 125 | else: 126 | index_requests.append(_INDEX_MAPPING[i.__class__](collection, **dataclasses.asdict(i))) 127 | 128 | return cast(list[Result[Json]], await asyncio.gather(*index_requests)) 129 | 130 | async def save( 131 | self, 132 | document: "ArangoModel", 133 | strategy: UpdateStrategy = UpdateStrategy.UPDATE, 134 | # todo: follow_links: bool = False, 135 | collection_options: Union[CollectionUpsertOptions, None] = None, 136 | ) -> Union["ArangoModel", "TVertexModel"]: 137 | model_fields_mapping = None 138 | if 
isinstance(document, VertexModel):
            # Vertex models are saved together with their related vertices and
            # edges in a single generated graph query; the returned mappings
            # let us write the server-assigned ids back onto the objects.
            model_fields_mapping, vertices_ids, edge_ids, query = _build_graph_query(
                document, collection_options=collection_options
            )
        else:
            # Plain document: pick per-collection upsert options by collection
            # name or by model class, then build a single upsert query.
            options = (
                collection_options
                and (collection_options.get(document.Collection.name) or collection_options.get(document.__class__))
                or None
            )

            filter_ = _get_upsert_filter(document)
            query = _make_upsert_query(filter_, document, document, ORMQuery(), strategy, options)

        try:
            cursor = await self.execute(query)
        except AQLQueryExecuteError as e:
            logger.exception(query)
            raise e
        else:
            result = await cursor.next()
            if model_fields_mapping:
                # Graph save: propagate server-assigned _id/_key/_rev onto the
                # in-memory vertex/edge objects.
                db_traverse(cast(VertexModel, document), set(), result, model_fields_mapping, vertices_ids, edge_ids)
            logger.debug("cursor stats", extra=cursor.statistics())
            return document

    async def get(
        self,
        model: Type["ArangoModel"],
        key: str,
        should_raise: bool = False,
        fetch_edges: Union[set[str], bool] = False,
        # fetch_edges_data: Union[set[str], bool] = False,
        fetch_path: bool = False,
        depth: "Range" = range(1, 1),
        prune: bool = False,
        projection: Optional[Type["ArangoModel"]] = None,
        return_raw: bool = False,
    ) -> Optional[Union["TVertexModel", "ArangoModel"]]:
        # Fetch a single document by key, optionally traversing its outbound
        # edges (all via-model edge collections by default, or an explicit set).
        collection = model.Collection.name
        _id = f"{collection}/{key}"
        d = Document(_id)
        doc = VariableExpression()
        main_query = ORMQuery().let(doc, d)
        return_: Union[VariableExpression, dict[str, VariableExpression]] = doc
        edges: Sequence[str]
        if fetch_edges:
            if isinstance(fetch_edges, set):
                edges = cast(Sequence[str], tuple(fetch_edges))
            else:
                # No explicit set: traverse every edge collection reachable
                # through the model's declared relationships.
                _edges = []
                for i in model.__relationships__.values():
                    if i.via_model:
                        _edges.append(i.via_model.Collection.name)
                edges = _edges

            v = IteratorExpression("v")
iterators = [v] 196 | e = IteratorExpression("e") 197 | iterators.append(e) 198 | 199 | if fetch_path: 200 | p = IteratorExpression("p") 201 | iterators.append(p) 202 | traversal_result = VariableExpression() 203 | 204 | traversal_iterators: TraverseIterators = cast(TraverseIterators, tuple(iterators)) 205 | traversal = ( 206 | ORMQuery() 207 | .traverse(traversal_iterators, edges, _id, depth, TraversalDirection.OUTBOUND) 208 | .return_({"v": iterators[0], "e": iterators[1]}) 209 | ) 210 | main_query.let(traversal_result, traversal) 211 | return_ = {"doc": doc, "edges": traversal_result} 212 | 213 | main_query.return_(return_) 214 | 215 | cursor = await self.execute(main_query) 216 | result = await cursor.next() 217 | if not result or (fetch_edges and not result.get("doc")): 218 | raise DocumentNotFoundError(_id) 219 | 220 | if issubclass(model, VertexModel): 221 | result, recursive = graph_to_document(result, model) 222 | 223 | if return_raw: 224 | return result 225 | 226 | result[PYDANGO_SESSION_KEY] = self 227 | if result is None and should_raise: 228 | raise DocumentNotFoundError() 229 | 230 | if projection: 231 | document = projection.from_orm(result, session=self) 232 | else: 233 | document = model.from_orm(result, session=self) 234 | 235 | return document 236 | 237 | async def find(self, model: Type[BaseArangoModel], filters=None, skip=None, limit=None): 238 | if self.database is None: 239 | raise SessionNotInitializedError( 240 | "you should call `await session.initialize()` before using the session or initialize it in the" 241 | " constructor with `StandardDatabase`" 242 | ) 243 | collection = _collection_from_model(self.database, model) 244 | return await collection.find(filters, skip, limit) 245 | 246 | async def execute(self, query: "AQLQuery", **options): 247 | if self.database is None: 248 | raise SessionNotInitializedError( 249 | f"you should call `await {self.initialize.__name__}` before using the session or initialize it in the" 250 | " constructor 
with `StandardDatabase`" 251 | ) 252 | prepared_query = query.prepare() 253 | logger.debug( 254 | "executing query", extra={"query": prepared_query.query, "bind_vars": json.dumps(prepared_query.bind_vars)} 255 | ) 256 | return await self.database.aql.execute( 257 | prepared_query.query, bind_vars=cast(MutableMapping, prepared_query.bind_vars), **options 258 | ) 259 | -------------------------------------------------------------------------------- /tests/session/test_social_network.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from typing import Annotated, Any, Iterable, List, Optional, Type, Union 3 | 4 | import pytest 5 | from _pytest.fixtures import FixtureRequest 6 | from pydiction import ANY_NOT_NONE, Contains, Matcher 7 | 8 | from pydango.connection.session import PydangoSession 9 | from pydango.orm.models import BaseArangoModel, EdgeModel, VertexModel 10 | from pydango.orm.models.base import Relation 11 | from pydango.orm.models.edge import EdgeCollectionConfig 12 | from pydango.orm.models.vertex import VertexCollectionConfig 13 | from pydango.utils import init_models 14 | 15 | 16 | class Post(VertexModel): 17 | title: str 18 | content: str 19 | # todo: make this work 20 | # author: Annotated["User", BackRelation["Authorship"]] 21 | comments: Annotated[Optional[List["Comment"]], Relation["Commentary"]] = None 22 | 23 | class Collection(VertexCollectionConfig): 24 | name = "posts" 25 | 26 | 27 | class Comment(VertexModel): 28 | text: str 29 | 30 | class Collection(VertexCollectionConfig): 31 | name = "comments" 32 | 33 | 34 | class User(VertexModel): 35 | name: str 36 | email: str 37 | age: int 38 | friends: Annotated[Optional[List["User"]], Relation["Friendship"]] = None 39 | posts: Annotated[Optional[List["Post"]], Relation["Authorship"]] = None 40 | comments: Annotated[Optional[List["Comment"]], Relation["Commentary"]] 41 | likes: Annotated[Optional[List[Union["Post", "Comment"]]], 
Relation["Like"]] = None

    class Collection(VertexCollectionConfig):
        name = "users"


class Friendship(EdgeModel):
    since: datetime.date

    class Collection(EdgeCollectionConfig):
        name = "friendships"


class Authorship(EdgeModel):
    created_at: datetime.datetime

    class Collection(EdgeCollectionConfig):
        name = "authorships"


class Commentary(EdgeModel):
    commented_at: datetime.datetime

    class Collection(EdgeCollectionConfig):
        name = "commentaries"


class Like(EdgeModel):
    liked_at: datetime.datetime

    class Collection(EdgeCollectionConfig):
        name = "likes"


# Resolve the string forward references used in the Relation annotations above.
Post.update_forward_refs()
Comment.update_forward_refs()
User.update_forward_refs()


@pytest.fixture(scope="module", autouse=True)
async def init_collections(session: PydangoSession):
    # Ensure all vertex/edge collections exist before any test in this module runs.
    models: Iterable[Type[BaseArangoModel]] = (Post, Comment, User, Friendship, Authorship, Commentary, Like)
    await init_models(session, *models)


@pytest.fixture()
def user():
    """Build the in-memory social graph used by the tests: John befriends
    Alice, authors a post, comments on it and likes both the post and the
    comment."""
    user1 = User(name="John", email="john@example.com", age=25)
    user2 = User(name="Alice", email="alice@example.com", age=21)
    post1 = Post(title="First Post", content="This is my first post!")
    comment1 = Comment(
        text="Great post!",
    )

    now = datetime.datetime.now()
    authorship1 = Authorship(created_at=now)
    commentary1 = Commentary(commented_at=now)
    like1 = Like(liked_at=now)
    like2 = Like(liked_at=now)

    user1.likes = [comment1, post1]
    user1.comments = [comment1]
    user1.posts = [post1]
    user1.friends = [user2]

    # One edge instance per related vertex, keyed by the relation field.
    # NOTE(review): Friendship.since is annotated as `date` but receives a
    # datetime here — presumably pydantic coerces it; confirm.
    user1.edges = {
        User.comments: [commentary1],
        User.posts: [authorship1],
        User.likes: [like1, like2],
        User.friends: [Friendship(since=now)],
    }

    post1.comments = [comment1]
    post1.edges = {Post.comments: [commentary1]}
    return user1


def expected_user_depth1(user: VertexModel) -> dict[str, Any]:
    """Declarative expectation for John's graph fetched to traversal depth 1:
    direct neighbors are present, nested relations (post comments) are not."""
    return {
        "_id": ANY_NOT_NONE,
        "_key": ANY_NOT_NONE,
        "_rev": ANY_NOT_NONE,
        "name": "John",
        "age": 25,
        "comments": [{"_id": ANY_NOT_NONE, "_key": ANY_NOT_NONE, "_rev": ANY_NOT_NONE, "text": "Great post!"}],
        "edges": {
            "comments": [
                {
                    "_from": ANY_NOT_NONE,
                    "_id": ANY_NOT_NONE,
                    "_key": ANY_NOT_NONE,
                    "_rev": ANY_NOT_NONE,
                    "_to": ANY_NOT_NONE,
                    "commented_at": user.edges.comments[0].commented_at,
                },
            ],
            "friends": [
                {
                    "_from": ANY_NOT_NONE,
                    "_id": ANY_NOT_NONE,
                    "_key": ANY_NOT_NONE,
                    "_rev": ANY_NOT_NONE,
                    "_to": ANY_NOT_NONE,
                    "since": user.edges.friends[0].since,
                }
            ],
            "likes": [
                {
                    "_from": ANY_NOT_NONE,
                    "_id": ANY_NOT_NONE,
                    "_key": ANY_NOT_NONE,
                    "_rev": ANY_NOT_NONE,
                    "_to": ANY_NOT_NONE,
                    "liked_at": user.edges.likes[0].liked_at,
                },
                {
                    "_from": ANY_NOT_NONE,
                    "_id": ANY_NOT_NONE,
                    "_key": ANY_NOT_NONE,
                    "_rev": ANY_NOT_NONE,
                    "_to": ANY_NOT_NONE,
                    "liked_at": user.edges.likes[1].liked_at,
                },
            ],
            "posts": [
                {
                    "_from": ANY_NOT_NONE,
                    "_id": ANY_NOT_NONE,
                    "_key": ANY_NOT_NONE,
                    "_rev": ANY_NOT_NONE,
                    "_to": ANY_NOT_NONE,
                    "created_at": user.edges.posts[0].created_at,
                }
            ],
        },
        "email": "john@example.com",
        "friends": [
            {
                "_id": ANY_NOT_NONE,
                "_key": ANY_NOT_NONE,
                "_rev": ANY_NOT_NONE,
                "age": 21,
                "comments": None,
                "edges": None,
                "email": "alice@example.com",
                "friends": None,
                "likes": None,
                "name": "Alice",
                "posts": None,
            }
        ],
        # Order-insensitive: likes may come back as (comment, post) in any order.
        "likes": Contains(
            [
                {"_id": ANY_NOT_NONE, "_key": ANY_NOT_NONE, "_rev": ANY_NOT_NONE, "text": "Great post!"},
                {
                    "_id": ANY_NOT_NONE,
                    "_key": ANY_NOT_NONE,
                    "_rev": ANY_NOT_NONE,
                    "content": "This is my first post!",
                    "title": "First Post",
                },
            ]
        ),
        "posts": [
            {
                "_id": ANY_NOT_NONE,
                "_key": ANY_NOT_NONE,
                "_rev": ANY_NOT_NONE,
                "comments": None,
                "content": "This is my first post!",
                "title": "First Post",
            }
        ],
    }


def expected_user_depth2(user: VertexModel):
    """Depth-2 expectation: like depth 1, but liked/authored posts also carry
    their comments."""
    new_user: dict[str, Any] = expected_user_depth1(user)
    new_user.update(
        {
            "likes": Contains(
                [
                    {"_id": ANY_NOT_NONE, "_key": ANY_NOT_NONE, "_rev": ANY_NOT_NONE, "text": "Great post!"},
                    {
                        "_id": ANY_NOT_NONE,
                        "_key": ANY_NOT_NONE,
                        "_rev": ANY_NOT_NONE,
                        "content": "This is my first post!",
                        "title": "First Post",
                        "comments": [
                            {"text": "Great post!", "_id": ANY_NOT_NONE, "_rev": ANY_NOT_NONE, "_key": ANY_NOT_NONE}
                        ],
                    },
                ],
            ),
            "posts": [
                {
                    "_id": ANY_NOT_NONE,
                    "_key": ANY_NOT_NONE,
                    "_rev": ANY_NOT_NONE,
                    "comments": [
                        {"_id": ANY_NOT_NONE, "_key": ANY_NOT_NONE, "_rev": ANY_NOT_NONE, "text": "Great post!"}
                    ],
                    "content": "This is my first post!",
                    "title": "First Post",
                }
            ],
        }
    )
    return new_user


@pytest.mark.run(order=1)
@pytest.mark.asyncio
async def test_save(matcher: Matcher, session: PydangoSession, request: FixtureRequest, user: User):
    # Persist the whole graph, then stash the generated key for the get-tests.
    await session.save(user)
    request.config.cache.set("user_key", user.key)  # type: ignore[union-attr]
    matcher.assert_declarative_object(user.dict(by_alias=True, include_edges=True), expected_user_depth2(user))


@pytest.mark.run(order=2)
async def test_get(matcher: Matcher, session: PydangoSession, request: FixtureRequest):
    _id = request.config.cache.get("user_key", None)  # type: ignore[union-attr]
    result = await session.get(User, _id, fetch_edges=True, depth=range(1, 1))
    assert result
    expected_user = expected_user_depth1(result)
    matcher.assert_declarative_object(
        result.dict(by_alias=True, include_edges=True),
        expected_user,
        check_order=False,
    )


@pytest.mark.run(order=2)
async def test_get_lazy_proxy_fetch(matcher: Matcher, session: PydangoSession, request: FixtureRequest):
    _id = request.config.cache.get("user_key", None)  # type: ignore[union-attr]
    result = await session.get(User, _id, fetch_edges=True, depth=range(1, 1))
    assert result

    # At depth 1 post comments are a lazy proxy; fetch() resolves them.
    await result.posts[0].comments.fetch()  # type: ignore

    expected_user = expected_user_depth1(result)

    expected_posts_comments = [{"_id": ANY_NOT_NONE, "_key": ANY_NOT_NONE, "_rev": ANY_NOT_NONE, "text": "Great post!"}]
    matcher.assert_declarative_object(result.dict(by_alias=True)["posts"][0]["comments"], expected_posts_comments)
    # Reset the fetched field so the depth-1 expectation still matches.
    if result.posts:
        result.posts[0].comments = None
    matcher.assert_declarative_object(
        result.dict(by_alias=True, include_edges=True),
        expected_user,
        check_order=False,
    )


@pytest.mark.run(order=2)
async def test_get2(matcher: Matcher, session: PydangoSession, request: FixtureRequest):
    _id = request.config.cache.get("user_key", None)  # type: ignore[union-attr]
    result = await session.get(User, _id, fetch_edges=True, depth=range(1, 2))
    assert result
    result_dict = result.dict(by_alias=True, include_edges=True)
    depth = expected_user_depth2(result)
    matcher.assert_declarative_object(result_dict, depth, check_order=False)
-------------------------------------------------------------------------------- /pydango/query/functions.py: --------------------------------------------------------------------------------
import json
from abc import ABC
from typing import TYPE_CHECKING, Optional,
Union 4 | 5 | from pydango.query.expressions import ( 6 | Expression, 7 | IterableExpression, 8 | ListExpression, 9 | LiteralExpression, 10 | ObjectExpression, 11 | QueryExpression, 12 | ReturnableExpression, 13 | ) 14 | 15 | if TYPE_CHECKING: 16 | from pydango.query.expressions import VariableExpression 17 | 18 | 19 | class BaseFunctionExpression(Expression): 20 | def __init__(self, name, *arguments): 21 | self.name = name 22 | arguments = list(arguments) 23 | 24 | for i, arg in enumerate(arguments): 25 | if isinstance(arg, dict): 26 | arguments[i] = ObjectExpression(arg) 27 | if isinstance(arg, list): 28 | arguments[i] = ListExpression(arg) 29 | 30 | self.arguments = arguments 31 | 32 | def compile(self, query_ref: "QueryExpression") -> str: 33 | arguments = [] 34 | for arg in self.arguments: 35 | if isinstance(arg, QueryExpression): 36 | arg.parent = query_ref 37 | 38 | if isinstance(arg, Expression): 39 | value = arg.compile(query_ref) 40 | elif isinstance(arg, (int, float, str, bool)): 41 | value = json.dumps(arg) 42 | else: 43 | value = arg 44 | arguments.append(value) 45 | 46 | return f"{self.name}({', '.join(arguments)})" 47 | 48 | def __repr__(self) -> str: 49 | arguments = [repr(arg) for arg in self.arguments] 50 | return f"{self.name}({', '.join(arguments)})" 51 | 52 | 53 | class FunctionExpression(BaseFunctionExpression, ReturnableExpression): 54 | name: str 55 | 56 | def __init__(self, *arguments): 57 | if not hasattr(self, "name") or not isinstance(self.name, str): 58 | raise ValueError("function name not defined") 59 | super().__init__(self.name, *arguments) 60 | 61 | 62 | # document 63 | class Document(FunctionExpression): 64 | name = "DOCUMENT" 65 | 66 | def __init__(self, _id: str): 67 | super().__init__(_id) 68 | 69 | 70 | class Unset(FunctionExpression): 71 | name = "UNSET" 72 | 73 | def __init__(self, collection, *fields): 74 | fields = list(fields) 75 | for i, field in enumerate(fields): 76 | if not isinstance(field, LiteralExpression): 77 | 
fields[i] = LiteralExpression(field) 78 | super().__init__(collection, *fields) 79 | 80 | 81 | class Merge( 82 | FunctionExpression, 83 | ObjectExpression, 84 | ): 85 | name = "MERGE" 86 | 87 | def __init__(self, *objects): 88 | objects_ = [] 89 | for o in objects: 90 | if isinstance(o, dict): 91 | objects_.append(ObjectExpression(o)) 92 | else: 93 | objects_.append(o) 94 | super().__init__(*objects_) 95 | 96 | 97 | class Has(FunctionExpression): 98 | name = "HAS" 99 | 100 | def __init__(self, collection: str, attr: str): 101 | super().__init__(collection, attr) 102 | 103 | 104 | class Zip(FunctionExpression): 105 | name = "ZIP" 106 | 107 | 108 | class Sum(FunctionExpression): 109 | name = "SUM" 110 | 111 | 112 | class Length(FunctionExpression): 113 | name = "LENGTH" 114 | further = "" 115 | 116 | def __sub__(self, other): 117 | self.further = f" - {other}" 118 | return self 119 | 120 | def __add__(self, other): 121 | self.further = f" + {other}" 122 | return self 123 | 124 | def compile(self, query_ref: "QueryExpression") -> str: 125 | return super().compile(query_ref) + self.further 126 | 127 | 128 | class CollectionsExpression(FunctionExpression): 129 | name = "COLLECTIONS" 130 | 131 | def __init__(self): 132 | super().__init__() 133 | 134 | 135 | # strings 136 | class RegExMatch(FunctionExpression): 137 | name = "REGEX_MATCHES" 138 | 139 | def __init__( 140 | self, 141 | string, 142 | pattern, 143 | case_insensitive: bool, 144 | ): 145 | super().__init__(string, pattern, case_insensitive) 146 | 147 | 148 | # arrays 149 | class ReturnsArray(IterableExpression, ABC): 150 | pass 151 | 152 | 153 | class Append(FunctionExpression): 154 | name = "APPEND" 155 | 156 | def __init__(self, array, value): 157 | super().__init__(array, value) 158 | 159 | 160 | class Concat(FunctionExpression, ReturnsArray): 161 | name = "CONCAT" 162 | 163 | def __init__(self, *arrays): 164 | super().__init__(*arrays) 165 | 166 | 167 | class Count(FunctionExpression): 168 | name = "COUNT" 
169 | 170 | def __init__(self, array): 171 | super().__init__(array) 172 | 173 | 174 | class CountDistinct(FunctionExpression): 175 | name = "COUNT_DISTINCT" 176 | 177 | def __init__(self, array): 178 | super().__init__(array) 179 | 180 | 181 | CountUnique = CountDistinct 182 | 183 | 184 | class First(FunctionExpression): 185 | name = "FIRST" 186 | 187 | def __init__(self, array): 188 | super().__init__(array) 189 | 190 | 191 | class Flatten(FunctionExpression): 192 | name = "FLATTEN" 193 | 194 | def __init__(self, array): 195 | super().__init__(array) 196 | 197 | 198 | class Interleave(FunctionExpression): 199 | name = "INTERLEAVE" 200 | 201 | def __init__(self, *array): 202 | super().__init__(*array) 203 | 204 | 205 | class Jaccard(FunctionExpression): 206 | name = "JACCARD" 207 | 208 | def __init__(self, array1, array2): 209 | super().__init__(array1, array2) 210 | 211 | 212 | class Last(FunctionExpression): 213 | name = "LAST" 214 | 215 | def __init__(self, array): 216 | super().__init__(array) 217 | 218 | 219 | class Nth(FunctionExpression): 220 | name = "NTH" 221 | 222 | def __init__(self, array, index): 223 | super().__init__(array, index) 224 | 225 | 226 | class Push(FunctionExpression): 227 | name = "PUSH" 228 | 229 | def __init__(self, array, value): 230 | super().__init__(array, value) 231 | 232 | 233 | class Pop(FunctionExpression): 234 | name = "POP" 235 | 236 | def __init__(self, array): 237 | super().__init__(array) 238 | 239 | 240 | class RemoveNth(FunctionExpression): 241 | name = "REMOVE_NTH" 242 | 243 | def __init__(self, array, index): 244 | super().__init__(array, index) 245 | 246 | 247 | class ReplaceNth(FunctionExpression): 248 | name = "REPLACE_NTH" 249 | 250 | def __init__(self, array, index): 251 | super().__init__(array, index) 252 | 253 | 254 | class RemoveValue(FunctionExpression): 255 | name = "REMOVE_VALUE" 256 | 257 | def __init__(self, array, value, limit: Optional[int] = None): 258 | super().__init__(array, value, limit) 259 | 260 
class Reverse(FunctionExpression):
    name = "REVERSE"

    def __init__(self, array):
        super().__init__(array)


class Shift(FunctionExpression):
    name = "SHIFT"

    def __init__(self, array):
        super().__init__(array)


class ArrayFunctionMixin:
    # Expose the wrapped arguments positionally (argument 0 is the array).
    def __getitem__(self, item):
        return self.arguments[item]


class Slice(FunctionExpression, IterableExpression, ArrayFunctionMixin):
    name = "SLICE"

    def __init__(self, array, start, count=None):
        # `count` is optional in AQL — omit it entirely when not given.
        if count is None:
            super().__init__(array, start)
        else:
            super().__init__(array, start, count)


class UnionArrays(FunctionExpression, IterableExpression, ArrayFunctionMixin):
    name = "UNION"

    def __init__(self, *arrays: Union[ListExpression, "VariableExpression"]):
        super().__init__(*arrays)


class Difference(FunctionExpression):
    name = "DIFFERENCE"

    def __init__(self, array1, array2):
        super().__init__(array1, array2)


class Intersection(FunctionExpression):
    name = "INTERSECTION"

    def __init__(self, *arrays):
        super().__init__(*arrays)


class Outersection(FunctionExpression):
    name = "OUTERSECTION"

    def __init__(self, *array):
        super().__init__(*array)


class Minus(FunctionExpression):
    name = "MINUS"

    def __init__(self, array1, array2):
        super().__init__(array1, array2)


class Position(FunctionExpression):
    name = "POSITION"

    def __init__(self, array, value):
        super().__init__(array, value)


# NOTE(review): this aliases the array-membership function POSITION; AQL also
# has a distinct string function CONTAINS — confirm the alias name is intended.
Contains = Position


class UnionDistinct(FunctionExpression):
    name = "UNION_DISTINCT"

    def __init__(self, *arrays):
        super().__init__(*arrays)


# bit


class BitAnd(FunctionExpression):
    name = "BIT_AND"

    def __init__(self, value1, value2):
        super().__init__(value1, value2)


class BitNot(FunctionExpression):
    name = "BIT_NOT"

    def __init__(self, value):
        super().__init__(value)


class BitOr(FunctionExpression):
    name = "BIT_OR"

    def __init__(self, value1, value2):
        super().__init__(value1, value2)


class BitXor(FunctionExpression):
    name = "BIT_XOR"

    def __init__(self, value1, value2):
        super().__init__(value1, value2)


# date


class DateAdd(FunctionExpression):
    name = "DATE_ADD"

    def __init__(self, date, amount, unit):
        super().__init__(date, amount, unit)


class DateCompare(FunctionExpression):
    name = "DATE_COMPARE"

    def __init__(self, date1, date2):
        super().__init__(date1, date2)


class DateDay(FunctionExpression):
    name = "DATE_DAY"

    def __init__(self, date):
        super().__init__(date)


class DateDayOfWeek(FunctionExpression):
    name = "DATE_DAY_OF_WEEK"

    def __init__(self, date):
        super().__init__(date)


class DateDaysInMonth(FunctionExpression):
    name = "DATE_DAYS_IN_MONTH"

    def __init__(self, date):
        super().__init__(date)


class DateDiff(FunctionExpression):
    name = "DATE_DIFF"

    def __init__(self, date1, date2, unit):
        super().__init__(date1, date2, unit)


class DateHour(FunctionExpression):
    name = "DATE_HOUR"

    def __init__(self, date):
        super().__init__(date)


class DateMilliseconds(FunctionExpression):
    name = "DATE_MILLISECONDS"

    def __init__(self, date):
        super().__init__(date)


class DateMinute(FunctionExpression):
    name = "DATE_MINUTE"

    def __init__(self, date):
        super().__init__(date)


class DateMonth(FunctionExpression):
    name = "DATE_MONTH"

    def __init__(self, date):
        super().__init__(date)


class DateNow(FunctionExpression):
    name = "DATE_NOW"

    def __init__(self):
        super().__init__()


class DateSecond(FunctionExpression):
    name = "DATE_SECOND"

    def __init__(self, date):
        super().__init__(date)


class DateSubtract(FunctionExpression):
    name = "DATE_SUBTRACT"

    def __init__(self, date, amount, unit):
        super().__init__(date, amount, unit)


class DateTimestamp(FunctionExpression):
    name = "DATE_TIMESTAMP"

    def __init__(self, date):
        super().__init__(date)


class DateYear(FunctionExpression):
    name = "DATE_YEAR"

    def __init__(self, date):
        super().__init__(date)
-------------------------------------------------------------------------------- /pydango/connection/graph_utils.py: --------------------------------------------------------------------------------
from collections import OrderedDict, defaultdict
from typing import (
    Any,
    DefaultDict,
    Iterator,
    Optional,
    Type,
    Union,
    cast,
    get_args,
    get_origin,
)

from indexed import IndexedOrderedDict

from pydango.connection.types import (
    EdgeCollectionsMapping,
    EdgesIdsMapping,
    EdgeVerticesIndexMapping,
    ModelFieldMapping,
    RelationGroup,
    VertexCollectionsMapping,
    VerticesIdsMapping,
)
from pydango.orm.consts import EDGES
from pydango.orm.models import EdgeModel, VertexModel
from pydango.orm.models.base import LIST_TYPES, ArangoModel, BaseArangoModel, LazyProxy
from pydango.orm.models.utils import convert_edge_data_to_valid_kwargs
from pydango.orm.models.vertex import TVertexModel
from
pydango.query.consts import FROM, ID, KEY, REV, TO 31 | 32 | 33 | def get_collection_from_document(obj: Union[str, dict, "ArangoModel"]) -> str: 34 | _obj = None 35 | if isinstance(obj, dict): 36 | _obj = obj.get(ID) 37 | elif isinstance(obj, BaseArangoModel): 38 | _obj = obj.id 39 | 40 | if not _obj or not isinstance(_obj, str): 41 | raise ValueError("cannot parse collection") 42 | 43 | return _obj.partition("/")[0] 44 | 45 | 46 | def _group_by_relation( 47 | model: BaseArangoModel, 48 | ) -> Iterator[RelationGroup]: 49 | relationships = model.__relationships__ 50 | for field, relation in relationships.items(): 51 | if get_origin(relation.link_model) is Union: 52 | for model_option in get_args(relation.link_model): 53 | yield RelationGroup(model_option.Collection.name, field, model_option, relation.via_model) 54 | else: 55 | yield RelationGroup(relation.link_model.Collection.name, field, relation.link_model, relation.via_model) 56 | 57 | 58 | def _set_edge_operational_fields(result, model_id, edges_ids, i): 59 | e_obj = result["edges"][i.Collection.name][edges_ids[i.__class__][model_id][id(i)]] 60 | i.id = e_obj[ID] 61 | i.key = e_obj[KEY] 62 | i.rev = e_obj[REV] 63 | i.from_ = e_obj[FROM] 64 | i.to = e_obj[TO] 65 | 66 | 67 | def db_traverse( 68 | model: TVertexModel, 69 | visited: set, 70 | result, 71 | model_fields_mapping: ModelFieldMapping, 72 | vertices_ids: VerticesIdsMapping, 73 | edges_ids: EdgesIdsMapping, 74 | ): 75 | model_id = id(model) 76 | if model_id in visited: 77 | return 78 | 79 | if isinstance(model, VertexModel): 80 | visited.add(model_id) 81 | 82 | v_index = vertices_ids[model.__class__][model_id] 83 | v_obj = result["vertex"][model.Collection.name][v_index] 84 | model.id = v_obj[ID] 85 | model.key = v_obj[KEY] 86 | model.rev = v_obj[REV] 87 | 88 | relations = list(_group_by_relation(model)) 89 | if not relations: 90 | return 91 | 92 | for relation_group in relations: 93 | relation_doc: Union[TVertexModel, None] = getattr(model, 
relation_group.field) 94 | if not relation_doc: 95 | continue 96 | 97 | if isinstance(relation_doc, LazyProxy): 98 | relation_doc = relation_doc.__instance__ # type: ignore[assignment] 99 | 100 | if model.edges: 101 | for edge_field, obj in model.edges.__dict__.items(): 102 | if isinstance(obj, list): 103 | for i in obj: 104 | _set_edge_operational_fields(result, model_id, edges_ids, i) 105 | elif obj is not None: 106 | _set_edge_operational_fields(result, model_id, edges_ids, obj) 107 | if isinstance(relation_doc, list): 108 | z = zip(relation_doc, getattr(model.edges, relation_group.field, [])) 109 | for vertex_doc, edge_doc in z: 110 | db_traverse(vertex_doc, visited, result, model_fields_mapping, vertices_ids, edges_ids) 111 | else: 112 | getattr(model.edges, relation_group.field) 113 | db_traverse( 114 | cast(VertexModel, relation_doc), visited, result, model_fields_mapping, vertices_ids, edges_ids 115 | ) 116 | else: 117 | # todo: insert join relation 118 | raise NotImplementedError("join relation not implemented yet") 119 | 120 | 121 | def graph_to_document(traversal_result: dict, model: Type[VertexModel]): 122 | doc = traversal_result["doc"] 123 | 124 | vertices: DefaultDict[str, dict[str, Any]] = defaultdict(dict) 125 | edges: dict[str, dict[tuple[str, str], Union[list[dict[str, Any]], dict[str, Any]]]] = {} 126 | if doc: 127 | vertices[doc[ID]] = doc 128 | edge_count = 0 129 | for relation in traversal_result["edges"]: 130 | v = relation["v"] 131 | e = relation["e"] 132 | edge_coll = get_collection_from_document(e) 133 | vertices[v[ID]] = v 134 | coordinate = (e[FROM], e[TO]) 135 | 136 | if coordinate not in edges: 137 | edges.setdefault(edge_coll, {})[coordinate] = e 138 | elif isinstance(edges[edge_coll][coordinate], list): 139 | cast(list, edges[edge_coll][coordinate]).append(e) 140 | else: 141 | edges.setdefault(edge_coll, {})[coordinate] = [cast(dict[str, Any], edges[edge_coll][coordinate])] 142 | cast(list, edges[edge_coll][coordinate]).append(e) 
        edge_count += 1

    if len(traversal_result["edges"]) != edge_count:
        raise AssertionError("something happened could not map all edges")

    new_d, recursive = map_graph_edges({"start": doc[ID], "vertices": vertices, "edges": edges}, model)

    return new_d, recursive


def map_graph_edges(graph, model):
    """Attach each edge's target vertex (and the edge itself) onto the source
    vertex dict under the model field that the edge collection maps to.

    Returns the start vertex plus a flag telling whether any target vertex was
    reached more than once (a recursive structure).
    """
    vertices = graph["vertices"]
    edges = graph["edges"]
    start = graph["start"]
    visited = set()
    recursive = False
    for coll, _edges in edges.items():
        for (f, t), e in _edges.items():
            to = vertices[t]
            # NOTE(review): this rebinds the outer loop variable `coll` from
            # the edge's _id — shadowing that looks unintentional, though the
            # two values should agree.
            coll, _, __ = e[ID].partition("/")

            # An entry is either a plain field name or a predicate callable
            # that decides (per edge/vertex) which field the pair belongs to.
            for func in model.__edge_to_field_mapping__[coll]:
                if to[ID] == start:
                    continue
                if callable(func):
                    if func(e, to):
                        map_edge(e, f, func.__name__, model, to, vertices)
                        break
                else:
                    map_edge(e, f, func, model, to, vertices)

            if id(to) in visited:
                recursive = True
            visited.add(id(to))
    # note here to detect circular references
    # - not supported in pydantic v1 start in pydantic v2
    return vertices[start], recursive


def map_edge(e, f, func, model, to, vertices):
    """Write target vertex ``to`` and edge ``e`` into source vertex ``f``
    under field ``func`` — as lists for to-many relations, scalars otherwise."""
    link_type = model.__relationships__[func].link_type
    if link_type in LIST_TYPES:
        if func not in vertices[f]:
            vertices[f][func] = []

        vertices[f][func].append(to)

        if func not in vertices[f].setdefault(EDGES, {}):
            vertices[f][EDGES][func] = []
        vertices[f][EDGES][func].append(e)
    else:
        vertices[f][func] = to
        vertices[f].setdefault(EDGES, {})[func] = e


def _build_object_graph_mappings(
    model: "TVertexModel",
    visited: set[int],
    vertex_collections: VertexCollectionsMapping,
    edge_collections: EdgeCollectionsMapping,
    edge_vertex_index: EdgeVerticesIndexMapping,
    model_fields_mapping: ModelFieldMapping,
) -> None:
    def
_add_model_field_to_mapping(model, field, relation_doc, edge_doc): 208 | model_id = id(model) 209 | 210 | mapping = model_fields_mapping.setdefault(model.__class__, {}) 211 | model_mapping = mapping.setdefault(model_id, {}) 212 | 213 | if model.__relationships__[field].link_type in LIST_TYPES: 214 | model_mapping.setdefault(field, []).append({"v": id(relation_doc), "e": id(edge_doc)}) 215 | else: 216 | model_mapping[field] = {"v": id(relation_doc), "e": id(edge_doc)} 217 | 218 | def _prepare_relation(field, model, edge_cls, edge_doc, relation_doc): 219 | model_id = id(model) 220 | if id(relation_doc) in ( 221 | edge_vertex_index.setdefault(edge_cls, {}) 222 | .setdefault(model_id, {}) 223 | .setdefault((model.__class__, relation_doc.__class__), []) 224 | ): 225 | return False 226 | 227 | if edge_doc: 228 | edge_collections.setdefault(edge_cls, IndexedOrderedDict()).setdefault(model_id, []).append(edge_doc) 229 | 230 | _add_model_field_to_mapping(model, field, relation_doc, edge_doc) 231 | 232 | ( 233 | edge_vertex_index.setdefault(edge_cls, {}) 234 | .setdefault(model_id, {}) 235 | .setdefault((model.__class__, relation_doc.__class__), []) 236 | .append(id(relation_doc)) 237 | ) 238 | 239 | if id(model) in visited: 240 | return 241 | 242 | if isinstance(model, VertexModel): 243 | vertex_collections.setdefault(model.__class__, IndexedOrderedDict())[id(model)] = model 244 | visited.add(id(model)) 245 | 246 | relations = list(_group_by_relation(model)) 247 | 248 | if not relations: 249 | return 250 | 251 | for relation_group in relations: 252 | relation_doc: Union[TVertexModel, None] = getattr(model, relation_group.field) 253 | 254 | if isinstance(relation_doc, LazyProxy): 255 | relation_doc = cast(VertexModel, relation_doc.__instance__) 256 | 257 | if not relation_doc: 258 | _add_model_field_to_mapping(model, relation_group.field, None, None) 259 | continue 260 | 261 | edge_cls: Optional[Type[EdgeModel]] = relation_group.via_model 262 | 263 | if model.edges: 264 | if 
isinstance(model.edges, dict): 265 | convert_edge_data_to_valid_kwargs(model.edges) 266 | # todo: this initiate the class edge model so it validates the edges, should we do that? 267 | model.edges = model.__fields__[EDGES].type_(**model.edges) 268 | 269 | if isinstance(relation_doc, list): 270 | if len(getattr(model.edges, relation_group.field, [])) != len(relation_doc): 271 | raise AssertionError( 272 | f"{model.__class__.__name__} vertex edges {relation_group.field} number mismatch" 273 | ) 274 | vertex_doc: VertexModel 275 | edge_doc: EdgeModel 276 | for vertex_doc, edge_doc in zip(relation_doc, getattr(model.edges, relation_group.field, [])): 277 | _prepare_relation(relation_group.field, model, edge_cls, edge_doc, vertex_doc) 278 | _build_object_graph_mappings( 279 | vertex_doc, 280 | visited, 281 | vertex_collections, 282 | edge_collections, 283 | edge_vertex_index, 284 | model_fields_mapping, 285 | ) 286 | 287 | else: 288 | edge_doc = getattr(model.edges, relation_group.field) 289 | _prepare_relation(relation_group.field, model, edge_cls, edge_doc, relation_doc) 290 | _build_object_graph_mappings( 291 | relation_doc, visited, vertex_collections, edge_collections, edge_vertex_index, model_fields_mapping 292 | ) 293 | else: 294 | # todo: insert join relation 295 | pass 296 | 297 | 298 | def _build_graph( 299 | document: VertexModel, _visited: set[int] 300 | ) -> tuple[EdgeCollectionsMapping, EdgeVerticesIndexMapping, VertexCollectionsMapping, ModelFieldMapping]: 301 | vertex_collections: VertexCollectionsMapping = OrderedDict() 302 | edge_collections: EdgeCollectionsMapping = OrderedDict() 303 | edge_vertex_index: EdgeVerticesIndexMapping = {} # defaultdict(lambda: defaultdict(lambda: defaultdict(list))) 304 | model_fields_mapping: ModelFieldMapping = {} 305 | 306 | _build_object_graph_mappings( 307 | document, _visited, vertex_collections, edge_collections, edge_vertex_index, model_fields_mapping 308 | ) 309 | return edge_collections, edge_vertex_index, 
vertex_collections, model_fields_mapping 310 | -------------------------------------------------------------------------------- /tests/session/test_family.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated, Iterable, Optional, Type 2 | 3 | import pytest 4 | from _pytest.fixtures import FixtureRequest 5 | from pydiction import ANY_NOT_NONE, Contains, Matcher 6 | 7 | from pydango.connection.session import PydangoSession 8 | from pydango.indexes import PersistentIndex 9 | from pydango.orm.models import BaseArangoModel, EdgeModel, VertexModel 10 | from pydango.orm.models.base import Relation 11 | from pydango.orm.models.edge import EdgeCollectionConfig 12 | from pydango.orm.models.vertex import VertexCollectionConfig 13 | from pydango.utils import init_models 14 | 15 | 16 | class Person(VertexModel): 17 | name: str 18 | age: int 19 | brothers: Annotated[Optional[list["Person"]], Relation["Sibling"]] 20 | sisters: Annotated[Optional[list["Person"]], Relation["Sibling"]] 21 | father: Annotated[Optional["Person"], Relation["Sibling"]] 22 | mother: Annotated[Optional["Person"], Relation["Sibling"]] 23 | 24 | class Collection(VertexCollectionConfig): 25 | name = "people" 26 | indexes = [PersistentIndex(fields=["name"]), PersistentIndex(fields=["age"])] 27 | 28 | @staticmethod 29 | def brothers(e: dict, _: dict) -> bool: 30 | return e["connection"] == "Brother" 31 | 32 | @staticmethod 33 | def sisters(e: dict, _: dict) -> bool: 34 | return e["connection"] == "Sister" 35 | 36 | @staticmethod 37 | def father(e: dict, _: dict) -> bool: 38 | return e["connection"] == "Father" 39 | 40 | @staticmethod 41 | def mother(e: dict, _: dict) -> bool: 42 | return e["connection"] == "Mother" 43 | 44 | 45 | class Sibling(EdgeModel): 46 | connection: str 47 | 48 | class Collection(EdgeCollectionConfig): 49 | name = "siblings" 50 | 51 | 52 | Person.update_forward_refs() 53 | 54 | 55 | def test_obj(): 56 | Person.parse_obj( 57 
{
            "_id": "people/29887",
            "_key": "29887",
            "_rev": "_gO2JSqS---",
            "age": 35,
            "brothers": [{"_key": "29888", "_id": "people/29888", "_rev": "_gO2JSqS--_", "name": "Ben", "age": 45}],
            "father": {"_key": "29891", "_id": "people/29891", "_rev": "_gO2JSqS--C", "name": "Father", "age": 70},
            "mother": {"_key": "29892", "_id": "people/29892", "_rev": "_gO2JSqS--D", "name": "Mother", "age": 70},
            "name": "John",
            "sisters": [
                {"_key": "29889", "_id": "people/29889", "_rev": "_gO2JSqS--A", "name": "Fiona", "age": 12},
                {"_key": "29890", "_id": "people/29890", "_rev": "_gO2JSqS--B", "name": "Jessica", "age": 12},
            ],
            "edges": {
                "brothers": [
                    {
                        "_key": "29893",
                        "_id": "siblings/29893",
                        "_from": "people/29887",
                        "_to": "people/29888",
                        "_rev": "_gO2JSqW---",
                        "connection": "Brother",
                    }
                ],
                "sisters": [
                    {
                        "_key": "29894",
                        "_id": "siblings/29894",
                        "_from": "people/29887",
                        "_to": "people/29889",
                        "_rev": "_gO2JSqm---",
                        "connection": "Sister",
                    },
                    {
                        "_key": "29895",
                        "_id": "siblings/29895",
                        "_from": "people/29887",
                        "_to": "people/29890",
                        "_rev": "_gO2JSqm--_",
                        "connection": "Sister",
                    },
                ],
                "father": {
                    "_key": "29896",
                    "_id": "siblings/29896",
                    "_from": "people/29887",
                    "_to": "people/29891",
                    "_rev": "_gO2JSqq---",
                    "connection": "Father",
                },
                "mother": {
                    "_key": "29897",
                    "_id": "siblings/29897",
                    "_from": "people/29887",
                    "_to": "people/29892",
                    "_rev": "_gO2JSqq--_",
                    "connection": "Mother",
                },
            },
        }
    )


@pytest.fixture(scope="module", autouse=True)
async def init_collections(session: PydangoSession):
    """Create the people/siblings collections (and their indexes) once per module."""
    models: Iterable[Type[BaseArangoModel]] = (Person, Sibling)
    await init_models(session, *models)


def expected_person(person: Person):
    """Build the pydiction declarative expectation for a fully fetched Person.

    Asserts up-front that every relation on ``person`` was populated, then
    mirrors its data with ANY_NOT_NONE placeholders for the server-assigned
    _id/_key/_rev/_from/_to fields.
    """
    assert person.sisters
    assert person.brothers
    assert person.father
    assert person.mother
    return {
        "_id": ANY_NOT_NONE,
        "_key": ANY_NOT_NONE,
        "_rev": ANY_NOT_NONE,
        "age": person.age,
        "brothers": [
            {
                "_id": ANY_NOT_NONE,
                "_key": ANY_NOT_NONE,
                "_rev": ANY_NOT_NONE,
                "age": person.brothers[0].age,
                "brothers": None,
                "edges": None,
                "father": None,
                "mother": None,
                "name": person.brothers[0].name,
                "sisters": None,
            }
        ],
        "edges": {
            "brothers": [
                {
                    "_from": ANY_NOT_NONE,
                    "_id": ANY_NOT_NONE,
                    "_key": ANY_NOT_NONE,
                    "_rev": ANY_NOT_NONE,
                    "_to": ANY_NOT_NONE,
                    "connection": person.edges.brothers[0].connection,
                }
            ],
            "father": {
                "_from": ANY_NOT_NONE,
                "_id": ANY_NOT_NONE,
                "_key": ANY_NOT_NONE,
                "_rev": ANY_NOT_NONE,
                "_to": ANY_NOT_NONE,
                "connection": person.edges.father.connection,
            },
            "mother": {
                "_from": ANY_NOT_NONE,
                "_id": ANY_NOT_NONE,
                "_key": ANY_NOT_NONE,
                "_rev": ANY_NOT_NONE,
                "_to": ANY_NOT_NONE,
                "connection": person.edges.mother.connection,
            },
            "sisters": [
                {
                    "_from": ANY_NOT_NONE,
                    "_id": ANY_NOT_NONE,
                    "_key": ANY_NOT_NONE,
                    "_rev": ANY_NOT_NONE,
                    "_to": ANY_NOT_NONE,
                    "connection": person.edges.sisters[0].connection,
                },
                {
                    "_from": ANY_NOT_NONE,
                    "_id": ANY_NOT_NONE,
                    "_key": ANY_NOT_NONE,
                    "_rev": ANY_NOT_NONE,
                    "_to": ANY_NOT_NONE,
                    "connection": person.edges.sisters[1].connection,
                },
            ],
        },
        "father": {
            "_id": ANY_NOT_NONE,
            "_key": ANY_NOT_NONE,
            "_rev": ANY_NOT_NONE,
            "age": person.father.age,
            "brothers": None,
            "edges": None,
            "father": None,
            "mother": None,
            "name": person.father.name,
            "sisters": None,
        },
        "mother": {
            "_id": ANY_NOT_NONE,
            "_key": ANY_NOT_NONE,
            "_rev": ANY_NOT_NONE,
            "age": person.mother.age,
            "brothers": None,
            "edges": None,
            "father": None,
            "mother": None,
            "name": person.mother.name,
            "sisters": None,
        },
        "name": person.name,
        "sisters": Contains(
            [
                {
                    "_id": ANY_NOT_NONE,
                    "_key": ANY_NOT_NONE,
                    "_rev": ANY_NOT_NONE,
                    "age": person.sisters[0].age,
                    "brothers": None,
                    "edges": None,
                    "father": None,
                    "mother": None,
                    "name": person.sisters[0].name,
                    "sisters": None,
                },
                {
                    "_id": ANY_NOT_NONE,
                    "_key": ANY_NOT_NONE,
                    "_rev": ANY_NOT_NONE,
                    "age": person.sisters[1].age,
                    "brothers": None,
                    "edges": None,
                    "father": None,
                    "mother": None,
                    "name": person.sisters[1].name,
                    "sisters": None,
                },
            ]
        ),
    }


@pytest.mark.run(order=1)
@pytest.mark.asyncio
async def test_save(session: PydangoSession, request: FixtureRequest):
    """Persist a small family graph rooted at John and stash his key for test_get."""
    fiona = Person(name="Fiona", age=12)
    jessica = Person(name="Jessica", age=12)
    ben = Person(name="Ben", age=45)
    john = Person(name="John", age=35)
    father = Person(name="Father", age=70)
    mother = Person(name="Mother", age=70)

    sister_edges = {
        Person.father: Sibling(connection="Father"),
        Person.mother: Sibling(connection="Mother"),
        Person.sisters: [Sibling(connection="Sister")],
        Person.brothers: [Sibling(connection="Brother"), Sibling(connection="Brother")],
    }

    brother_edges = {
        Person.father: Sibling(connection="Father"),
        Person.mother: Sibling(connection="Mother"),
        Person.sisters: [Sibling(connection="Sister"), Sibling(connection="Sister")],
        Person.brothers: [Sibling(connection="Brother")],
    }

    fiona.sisters = [jessica]
    fiona.brothers = [ben, john]
    fiona.father = father
    fiona.mother = mother

    # NOTE(review): dict.copy() is shallow — the Sibling instances (and the lists
    # holding them) are shared between every person copying the same template
    # (e.g. john and ben both reference brother_edges' Sibling objects).  If the
    # session mutates edge documents during save, these writes can conflict —
    # verify whether fresh Sibling instances per person are intended.
    fiona.edges = sister_edges.copy()  # type: ignore[assignment]

    jessica.sisters = [fiona]
    jessica.brothers = [ben, john]
    jessica.father = father
    jessica.mother = mother

    jessica.edges = sister_edges.copy()  # type: ignore[assignment]

    john.sisters = [fiona, jessica]
    john.brothers = [ben]
    john.father = father
    john.mother = mother

    john.edges = brother_edges.copy()  # type: ignore[assignment]

    ben.sisters = [fiona, jessica]
    ben.brothers = [john]
    ben.father = father
    ben.mother = mother

    ben.edges = brother_edges.copy()  # type: ignore[assignment]

    p = await session.save(john)
    request.config.cache.set("person_key", p.key)  # type: ignore[union-attr]

    # todo: there is currently a caveat with pydantic v1 with circular references, in pydantic v2 this is resolved
    # def traverse_recursive_fields(p, recursive_fields, visited):
    #     if isinstance(p, Sequence):
    #         for i in p:
    #             traverse_recursive_fields(i, exclude, visited)
    #
    #     else:
    #         d = p.dict(include_edges=False, by_alias=True, exclude=recursive_fields)
    #         for recursive_field in recursive_fields:
    #             attr = getattr(p, recursive_field)
    #
    #             for i in attr:
    #                 d[recursive_field] = i.dict(include_edges=False, by_alias=True, exclude=recursive_fields)
    #                 visited.add(id(i))
    #             if id(attr) in visited:
    #                 return d
    #             visited.add(id(attr))
    #             traverse_recursive_fields(attr, exclude, visited)
    #     return d
    # exclude = {
    #     "brothers",
    #     "sisters",
    # }
    # actual = traverse_recursive_fields(p, exclude, visited=set())
    # person = expected_person(p)
    # Matcher().assert_declarative_object(actual, person)


@pytest.mark.run(order=2)
async def test_get(matcher: Matcher, session: PydangoSession, request: FixtureRequest):
    """Fetch John (saved by test_save) with his edges and match the full graph."""
    # NOTE(review): unlike test_save this coroutine has no @pytest.mark.asyncio —
    # presumably asyncio_mode=auto is configured; confirm, otherwise it is skipped.
    _id = request.config.cache.get("person_key", None)  # type: ignore[union-attr]
    result = await session.get(Person, _id, fetch_edges=True)
    assert result
    result_dict = result.dict(by_alias=True, include_edges=True)
    person = expected_person(result)
    matcher.assert_declarative_object(result_dict, person)

--------------------------------------------------------------------------------