├── mango
├── py.typed
├── meta.py
├── __init__.py
├── encoder.py
├── index.py
├── source.py
├── utils.py
├── drive.py
├── fields.py
├── expression.py
├── result.py
├── models.py
└── stage.py
├── docs
├── README_JA.md
└── README_EN.md
├── .whitesource
├── .pre-commit-config.yaml
├── renovate.json
├── .github
└── workflows
│ └── pypi-publish.yml
├── LICENSE
├── pyproject.toml
├── .ruff.toml
├── assets
└── mango-logo.svg
├── .gitignore
├── README.md
└── pdm.lock
/mango/py.typed:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/README_JA.md:
--------------------------------------------------------------------------------
1 | # 翻訳が必要
--------------------------------------------------------------------------------
/docs/README_EN.md:
--------------------------------------------------------------------------------
1 | # Need translation
--------------------------------------------------------------------------------
/.whitesource:
--------------------------------------------------------------------------------
1 | {
2 | "scanSettings": {
3 | "baseBranches": []
4 | },
5 | "checkRunSettings": {
6 | "vulnerableCheckRunConclusionLevel": "failure",
7 | "displayMode": "diff",
8 | "useMendCheckNames": true
9 | },
10 | "issueSettings": {
11 | "minSeverityLevel": "LOW",
12 | "issueType": "DEPENDENCY"
13 | }
14 | }
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | ci:
2 | autofix_commit_msg: "🚨 通过预提交挂钩自动修复"
3 | autofix_prs: true
4 | autoupdate_branch: main
5 | autoupdate_schedule: monthly
6 | autoupdate_commit_msg: "⬆️ 自动更新预提交挂钩"
7 | repos:
8 | - repo: https://github.com/astral-sh/ruff-pre-commit
9 | rev: v0.3.5
10 | hooks:
11 | - id: ruff
12 | args: [--fix]
13 | stages: [commit]
14 | - id: ruff-format
15 | stages: [commit]
16 |
--------------------------------------------------------------------------------
/mango/meta.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Sequence
2 | from typing import TypedDict
3 |
4 | from mango.drive import Database
5 | from mango.encoder import EncodeType
6 | from mango.index import Index, IndexTuple
7 |
8 |
class MetaConfig(TypedDict, total=False):
    """Per-model configuration options for a Mango document.

    All keys are optional (``total=False``); absent keys fall back to the
    framework defaults.
    """

    # Collection name; when absent, derived from the model class name.
    name: str | None
    # Target database (instance or name); None selects the default database.
    database: Database | str | None
    # Index declarations: bare field names, Index objects, or key-tuple sequences.
    indexes: Sequence[str | Index | Sequence[IndexTuple]]
    # Extra BSON encoders mapping type(s) -> encode callable.
    bson_encoders: EncodeType
    # Presumably controls whether field aliases are used when (de)serializing
    # — TODO confirm against the consumer of this config.
    by_alias: bool
15 |
--------------------------------------------------------------------------------
/mango/__init__.py:
--------------------------------------------------------------------------------
"""Public API surface of the mango ODM package."""

from importlib.metadata import version

from mango.expression import OPR
from mango.fields import Field
from mango.index import Attr, Index, Order
from mango.meta import MetaConfig
from mango.models import Document, EmbeddedDocument
from mango.source import Mango
from mango.stage import Pipeline

# Resolved from the installed distribution's metadata (see pyproject.toml).
__version__ = version("mango-odm")

__all__ = [
    "OPR",
    "Field",
    "Attr",
    "Index",
    "Order",
    "MetaConfig",
    "Document",
    "EmbeddedDocument",
    "Mango",
    "Pipeline",
]
25 |
--------------------------------------------------------------------------------
/renovate.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json",
3 | "extends": [
4 | "config:base"
5 | ],
6 | "labels": [
7 | "dependencies"
8 | ],
9 | "dependencyDashboard": false,
10 | "minimumReleaseAge": "3 days",
11 | "commitMessagePrefix": "⬆️",
12 | "commitMessageTopic": "dependency `{{depName}}`",
13 | "vulnerabilityAlerts": {
14 | "addLabels": ["security"],
15 | "commitMessagePrefix": "🔒️",
16 | "assigneesFromCodeOwners": true
17 | },
18 | "packageRules": [
19 | {
20 | "groupName": "dev dependencies",
21 | "matchPackageNames": [
22 | "black",
23 | "isort"
24 | ]
25 | },
26 | {
27 | "groupName": "test dependencies",
28 | "matchPackagePatterns": ["pytest"],
29 | "matchPackageNames": [
30 | "hypothesis",
31 | "Faker"
32 | ]
33 | }
34 | ]
35 | }
36 |
--------------------------------------------------------------------------------
/.github/workflows/pypi-publish.yml:
--------------------------------------------------------------------------------
1 | name: Publish
2 |
3 | on:
4 | push:
5 | tags:
6 | - 'v*'
7 | workflow_dispatch:
8 |
9 | jobs:
10 | pypi-publish:
11 | name: Upload release to PyPI
12 | runs-on: ubuntu-latest
13 | steps:
14 | - uses: actions/checkout@master
15 | - name: Set up Python
16 | uses: actions/setup-python@v1
17 | with:
18 | python-version: "3.x"
19 | - name: Install pypa/build
20 | run: >-
21 | python -m
22 | pip install
23 | build
24 | --user
25 | - name: Build a binary wheel and a source tarball
26 | run: >-
27 | python -m
28 | build
29 | --sdist
30 | --wheel
31 | --outdir dist/
32 | .
33 | - name: Publish distribution to PyPI
34 | uses: pypa/gh-action-pypi-publish@release/v1
35 | with:
36 | password: ${{ secrets.PYPI_API_TOKEN }}
37 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Akirami
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/mango/encoder.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable
2 | from enum import Enum
3 | from typing import Any, ClassVar, TypeAlias
4 |
5 | from bson.codec_options import CodecOptions, TypeRegistry
6 |
7 | EncodeType: TypeAlias = dict[type[Any] | tuple[type[Any], ...], Callable[..., Any]]
8 |
9 |
class Encoder:
    """Builds BSON codec options whose fallback encoder converts types
    BSON cannot serialize natively (e.g. ``set``, ``Enum``)."""

    # Built-in fallbacks: sets become lists, enum members collapse to their value.
    default_encode_type: ClassVar[EncodeType] = {
        set: list,
        Enum: lambda e: e.value,
    }

    @classmethod
    def create(
        cls,
        encode_type: EncodeType | None = None,
    ) -> CodecOptions:
        """Create codec options with a fallback encoder.

        Caller-supplied ``encode_type`` entries take precedence over the
        class defaults for the same type. (Previously the merge was
        ``encode_type | default_encode_type``, which let the defaults
        silently override user encoders, since the right operand of ``|``
        wins on key collision.)
        """
        encode_type = encode_type or {}

        def fallback(value: Any) -> Any:
            # Re-read the class defaults on every call so encoders added via
            # add_encode_type() also apply to codec options created earlier.
            for type_, encode_fn in (cls.default_encode_type | encode_type).items():
                if isinstance(value, type_):
                    return encode_fn(value)
            raise TypeError(f"无法编码 {type(value)} 类型的对象: {value}")

        return CodecOptions(type_registry=TypeRegistry(fallback_encoder=fallback))

    @classmethod
    def add_encode_type(cls, encode_type: EncodeType) -> None:
        """Register additional default encoders (later entries override earlier)."""
        cls.default_encode_type |= encode_type
36 |
--------------------------------------------------------------------------------
/mango/index.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Mapping
2 | from enum import Enum, unique
3 | from typing import Any, TypeAlias
4 |
5 | import pymongo
6 |
7 |
class IndexEnum(Enum):
    """Base enum whose ``str()`` is the bare member name (not ``Class.NAME``)."""

    def __str__(self) -> str:
        return self.name
11 |
12 |
@unique
class Order(int, IndexEnum):
    """Index sort directions; values are pymongo's sort constants."""

    ASC = pymongo.ASCENDING
    """Ascending order"""
    DESC = pymongo.DESCENDING
    """Descending order"""
19 |
20 |
@unique
class Attr(str, IndexEnum):  # noqa: SLOT000
    """Special (non-ordering) index kinds; values are pymongo's index names."""

    GEO2D = pymongo.GEO2D
    """2D geospatial index"""
    GEOSPHERE = pymongo.GEOSPHERE
    """Spherical geospatial index"""
    HASHED = pymongo.HASHED
    """Hashed index"""
    TEXT = pymongo.TEXT
    """Text index"""
31 |
32 |
# A single index key's direction or special kind.
IndexType: TypeAlias = Order | Attr
# A (field_name, direction-or-options) pair, as accepted by pymongo.
IndexTuple: TypeAlias = tuple[str, IndexType | Mapping[str, Any]]
35 |
36 |
class Index(pymongo.IndexModel):
    """Declarative MongoDB index built on pymongo's ``IndexModel``.

    The ordering and special-index constants are re-exported on the class
    so callers can write ``Index("field", Index.DESC)`` without extra imports.
    """

    ASC = Order.ASC  # ascending order
    DESC = Order.DESC  # descending order
    GEO2D = Attr.GEO2D  # 2D geospatial index
    GEOSPHERE = Attr.GEOSPHERE  # spherical geospatial index
    HASHED = Attr.HASHED  # hashed index
    TEXT = Attr.TEXT  # text index

    def __init__(
        self,
        *keys: str | IndexTuple,
        name: str | None = None,
        unique: bool = False,
        background: bool = False,
        sparse: bool = False,
        **kwargs: Any,
    ) -> None:
        # A bare field name is shorthand for an ascending key on that field.
        normalized = tuple(
            (key, self.ASC) if isinstance(key, str) else key for key in keys
        )
        # Forward only options that were actually set (truthy), letting
        # pymongo apply its own defaults for the rest.
        options: dict[str, Any] = {}
        if name:
            options["name"] = name
        if unique:
            options["unique"] = unique
        if background:
            options["background"] = background
        if sparse:
            options["sparse"] = sparse
        super().__init__(
            normalized,
            **options,
            **kwargs,
        )
73 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "mango-odm"
3 | version = "0.4.1"
4 | description = "🥭 Async MongoDB ODM with type hints in Python"
5 | authors = [{ name = "Akirami", email = "akiramiaya@outlook.com" }]
6 | requires-python = ">=3.10,<4.0"
7 | license = { text = "MIT" }
8 | readme = "README.md"
9 | keywords = [
10 | "mongo",
11 | "mongodb",
12 | "async",
13 | "asyncio",
14 | "odm",
15 | "types",
16 | "pydantic",
17 | "motor",
18 | ]
19 | classifiers = [
20 | "Development Status :: 4 - Beta",
21 | "Framework :: AsyncIO",
22 | "Intended Audience :: Developers",
23 | "License :: OSI Approved :: MIT License",
24 | "Operating System :: OS Independent",
25 | "Programming Language :: Python",
26 | "Programming Language :: Python :: 3",
27 | "Programming Language :: Python :: 3 :: Only",
28 | "Programming Language :: Python :: 3.10",
29 | "Programming Language :: Python :: 3.11",
30 | "Programming Language :: Python :: 3.12",
31 | "Topic :: Database",
32 | "Topic :: Database :: Front-Ends",
33 | "Topic :: Software Development",
34 | "Topic :: Software Development :: Object Brokering",
35 | "Topic :: Software Development :: Libraries",
36 | "Topic :: Software Development :: Libraries :: Python Modules",
37 | "Typing :: Typed",
38 | ]
39 | dependencies = [
40 | "motor>=3.4.0",
41 | "pydantic>=2.7.0",
42 | "motor-types>=1.0.0b4",
43 | ]
44 |
45 | [project.urls]
46 | repository = "https://github.com/A-kirami/mango"
47 |
48 | [tool.pdm.dev-dependencies]
49 | dev = [
50 | "ruff>=0.3.5",
51 | "pre-commit>=3.7.0",
52 | ]
53 | test = [
54 | "pytest>=7.4.4",
55 | "pytest-cov>=4.1.0",
56 | "pytest-sugar>=1.0.0",
57 | "allure-pytest>=2.13.2",
58 | "pytest-asyncio>=0.23.4",
59 | "hypothesis>=6.98.9",
60 | "Faker>=23.2.1",
61 | ]
62 |
63 | [tool.pdm.scripts]
64 | lint = "ruff check"
65 | "lint:fix" = "ruff check --fix"
66 | format = "ruff format"
67 | post_install = "pre-commit install"
68 |
69 | [tool.pyright]
70 | pythonVersion = "3.10"
71 | pythonPlatform = "All"
72 | typeCheckingMode = "basic"
73 |
74 | [tool.pytest.ini_options]
75 | asyncio_mode = "auto"
76 | addopts = "--cov=mango --cov-report=html --cov-report=xml --junit-xml=results.xml --cov-report=term-missing --alluredir=allure_report --clean-alluredir"
77 |
78 | [build-system]
79 | requires = ["pdm-backend"]
80 | build-backend = "pdm.backend"
81 |
--------------------------------------------------------------------------------
/.ruff.toml:
--------------------------------------------------------------------------------
1 | [lint]
2 | select = [
3 | "F", # Pyflakes
4 | "E", # pycodestyle error
5 | "W", # pycodestyle warning
6 | "I", # isort
7 | "UP", # pyupgrade
 8 |     "ASYNC", # flake8-async
9 | "S", # flake8-bandit
10 | "B", # flake8-bugbear
11 | "C4", # flake8-comprehensions
12 | "ISC", # flake8-implicit-str-concat
13 | "PIE", # flake8-pie
14 | "T20", # flake8-print
15 | "PYI", # flake8-pyi
16 | "PT", # flake8-pytest-style
17 | "Q", # flake8-quotes
18 | "RSE", # flake8-raise
19 | "RET", # flake8-return
20 | "SLOT", # flake8-slots
21 | "SIM", # flake8-simplify
22 | "TID", # flake8-tidy-imports
23 | "TCH", # flake8-type-checking
24 | "ARG", # flake8-unused-arguments
25 | "PTH", # flake8-use-pathlib
26 | "ERA", # eradicate
27 | "PL", # Pylint
28 | "TRY", # tryceratops
29 | "PERF", # Perflint
30 | # "FURB", # refurb
31 | "RUF", # Ruff-specific rules
32 | ]
33 | ignore = [
34 | "E402", # module-import-not-at-top-of-file
35 | "E501", # line-too-long
36 | "B009", # get-attr-with-constant
37 | "B010", # set-attr-with-constant
38 | "PLC0414", # useless-import-alias
39 | "PLR0913", # too-many-arguments
40 | "TRY003", # raise-vanilla-args
41 | "RUF001", # ambiguous-unicode-character-string
42 | "RUF002", # ambiguous-unicode-character-docstring
43 | "RUF003", # ambiguous-unicode-character-comment
44 |
45 | # Avoid formatter conflicts, see https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
46 | "W191", # tab-indentation
47 | "E111", # indentation-with-invalid-multiple
48 | "E114", # indentation-with-invalid-multiple-comment
49 | "E117", # over-indented
50 | "D206", # indent-with-spaces
51 | "D300", # triple-single-quotes
52 | "Q000", # bad-quotes-inline-string
53 | "Q001", # bad-quotes-multiline-string
54 | "Q002", # bad-quotes-docstring
55 | "Q003", # avoidable-escaped-quote
56 | "COM812", # missing-trailing-comma
57 | "COM819", # prohibited-trailing-comma
58 | "ISC001", # single-line-implicit-string-concatenation
59 | "ISC002", # multi-line-implicit-string-concatenation
60 | ]
61 | unfixable = [
62 | "F401", # unused-import
63 | "F841", # unused-variable
64 | "ERA001", # commented-out-code
65 | ]
66 |
--------------------------------------------------------------------------------
/mango/source.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from typing import TYPE_CHECKING, Any, ClassVar
3 |
4 | from mango.drive import DEFAULT_CONNECT_URI, Client
5 | from mango.utils import get_indexes, to_snake_case
6 |
7 | if TYPE_CHECKING: # pragma: no cover
8 | from mango.models import Document
9 |
10 |
async def init_model(model: type["Document"], *, revise_index: bool = False) -> None:
    """Bind *model* to its collection, then set up its indexes.

    The collection name comes from the model's meta config, falling back to
    the snake_cased class name.
    """
    config = model.meta_config
    database = Client.get_database(config.get("database"))
    collection_name = config.get("name") or to_snake_case(model.__name__)
    model.__collection__ = database[collection_name]
    await init_index(model, revise_index=revise_index)
17 |
18 |
async def init_index(model: type["Document"], *, revise_index: bool = False) -> None:
    """Create the model's declared indexes.

    With ``revise_index=True``, any index that exists on the collection but
    is no longer declared on the model is dropped (``_id_`` is always kept).
    """
    keep = ["_id_"]
    declared = list(get_indexes(model))
    if declared:
        keep += await model.__collection__.create_indexes(declared)
    if not revise_index:
        return
    existing = await model.__collection__.index_information()
    for stale in set(existing) - set(keep):
        await model.__collection__.drop_index(stale)
28 |
29 |
class Mango:
    """Registry and lifecycle manager for document models and connections."""

    _document_models: ClassVar[set[type["Document"]]] = set()

    @classmethod
    async def init(
        cls,
        db: str | None = None,
        *,
        uri: str = DEFAULT_CONNECT_URI,
        revise_index: bool = False,
        **kwargs: Any,
    ) -> None:
        """Connect (if needed) and initialize all registered document models.

        A new connection is created when the caller passes an explicit
        ``db``, a non-default ``uri``, extra client ``kwargs``, or when no
        client exists yet. (The previous condition ``db or uri or not
        Client._clients`` was always true — ``uri`` has a non-empty default —
        so every call opened a fresh connection.)
        """
        if db or uri != DEFAULT_CONNECT_URI or kwargs or not Client._clients:
            cls.connect(db, uri, **kwargs)
        tasks = [
            init_model(model, revise_index=revise_index)
            for model in cls._document_models
        ]
        await asyncio.gather(*tasks)

    @classmethod
    def connect(
        cls,
        db: str | None = None,
        /,
        uri: str = DEFAULT_CONNECT_URI,
        **kwargs: Any,
    ) -> Client:
        """Create a client connection and touch *db* so it is registered."""
        client = Client(uri, **kwargs)
        client.get_database(db)
        return client

    @classmethod
    def disconnect(cls, *clients: Client) -> None:
        """Close the given clients, or every known client when none are given."""
        # Iterate over a copy: Client.close() mutates Client._clients.
        for client in Client._clients.copy():
            if not clients or client in clients:
                client.close()

    @classmethod
    def register_model(cls, model: type["Document"]) -> None:
        """Register a document model for initialization by ``init()``."""
        cls._document_models.add(model)
74 |
--------------------------------------------------------------------------------
/assets/mango-logo.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 | allure_report/
54 | results.xml
55 |
56 | # Translations
57 | *.mo
58 | *.pot
59 |
60 | # Django stuff:
61 | *.log
62 | local_settings.py
63 | db.sqlite3
64 | db.sqlite3-journal
65 |
66 | # Flask stuff:
67 | instance/
68 | .webassets-cache
69 |
70 | # Scrapy stuff:
71 | .scrapy
72 |
73 | # Sphinx documentation
74 | docs/_build/
75 |
76 | # PyBuilder
77 | .pybuilder/
78 | target/
79 |
80 | # Jupyter Notebook
81 | .ipynb_checkpoints
82 |
83 | # IPython
84 | profile_default/
85 | ipython_config.py
86 |
87 | # pyenv
88 | # For a library or package, you might want to ignore these files since the code is
89 | # intended to run in multiple environments; otherwise, check them in:
90 | # .python-version
91 |
92 | # pipenv
93 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
94 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
95 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
96 | # install all needed dependencies.
97 | #Pipfile.lock
98 |
99 | # poetry
100 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
101 | # This is especially recommended for binary packages to ensure reproducibility, and is more
102 | # commonly ignored for libraries.
103 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
104 | #poetry.lock
105 | # Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
106 | poetry.toml
107 |
108 | # pdm
109 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
110 | #pdm.lock
111 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
112 | # in version control.
113 | # https://pdm.fming.dev/#use-with-ide
114 | .pdm-python
115 |
116 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
117 | __pypackages__/
118 |
119 | # Celery stuff
120 | celerybeat-schedule
121 | celerybeat.pid
122 |
123 | # SageMath parsed files
124 | *.sage.py
125 |
126 | # Environments
127 | .env
128 | .venv
129 | env/
130 | venv/
131 | ENV/
132 | env.bak/
133 | venv.bak/
134 |
135 | # Spyder project settings
136 | .spyderproject
137 | .spyproject
138 |
139 | # Rope project settings
140 | .ropeproject
141 |
142 | # mkdocs documentation
143 | /site
144 |
145 | # mypy
146 | .mypy_cache/
147 | .dmypy.json
148 | dmypy.json
149 |
150 | # Pyre type checker
151 | .pyre/
152 |
153 | # pytype static type analyzer
154 | .pytype/
155 |
156 | # Cython debug symbols
157 | cython_debug/
158 |
159 | # ruff
160 | .ruff_cache/
161 |
162 | # LSP config files
163 | pyrightconfig.json
164 |
165 | # PyCharm
166 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
167 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
168 | # and can be added to the global gitignore or merged into this file. For a more nuclear
169 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
170 | #.idea/
171 |
172 | # VisualStudioCode
173 | .vscode/*
174 | !.vscode/settings.json
175 | !.vscode/tasks.json
176 | !.vscode/launch.json
177 | !.vscode/extensions.json
178 | !.vscode/*.code-snippets
179 |
180 | # VS Code Counter
181 | .VSCodeCounter/
182 |
--------------------------------------------------------------------------------
/mango/utils.py:
--------------------------------------------------------------------------------
1 | import re
2 | from collections.abc import Callable, Generator, Iterable, Sequence
3 | from types import UnionType
4 | from typing import TYPE_CHECKING, Any
5 |
6 | import pydantic
7 |
8 | from mango.fields import FieldInfo
9 | from mango.index import Index, IndexType
10 |
11 | if TYPE_CHECKING: # pragma: no cover
12 | from pydantic import BaseModel
13 |
14 | from mango.models import Document
15 |
16 |
def to_snake_case(string: str) -> str:
    """Convert a CamelCase / mixedCase identifier to snake_case.

    Runs of capitals followed by a word ("HTTPServer") are split before the
    final word, then any remaining lower/digit-to-upper boundaries are split.
    """
    partial = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", string)
    snake = re.sub("([a-z0-9])([A-Z])", r"\1_\2", partial)
    return snake.lower()
21 |
22 |
23 | def all_check(
24 | iter_obj: Iterable[object],
25 | type_or_func: type
26 | | UnionType
27 | | Callable
28 | | tuple[type | UnionType | tuple[Any, ...], ...],
29 | ) -> bool:
30 | """
31 | 如果可迭代对象中的所有元素为指定类型,则返回True。
32 | 如果可迭代对象为空,则返回True。
33 | """
34 | if isinstance(type_or_func, Callable):
35 | return all(type_or_func(obj) for obj in iter_obj)
36 | return all(isinstance(obj, type_or_func) for obj in iter_obj)
37 |
38 |
39 | def any_check(
40 | iter_obj: Iterable[object],
41 | type_or_func: type
42 | | UnionType
43 | | Callable
44 | | tuple[type | UnionType | tuple[Any, ...], ...],
45 | ) -> bool:
46 | """
47 | 如果可迭代对象中的任意元素为指定类型,则返回True。
48 | 如果可迭代对象为空,则返回False。
49 | """
50 | if isinstance(type_or_func, Callable):
51 | return any(type_or_func(obj) for obj in iter_obj)
52 | return any(isinstance(obj, type_or_func) for obj in iter_obj)
53 |
54 |
def is_sequence(
    iter_obj: Sequence[object],
) -> bool:
    """Return True for sequence objects other than ``str``/``bytes``."""
    # str/bytes are technically Sequences but are treated as scalars here.
    if isinstance(iter_obj, (bytes, str)):
        return False
    return isinstance(iter_obj, Sequence)
60 |
61 |
def validate_fields(
    model: type["Document"], input_data: dict[str, Any]
) -> dict[str, Any]:
    """Validate only the fields of *model* present in *input_data*.

    Builds a throwaway pydantic model restricted to those fields and runs
    validation against it; returns *input_data* unchanged on success.

    Raises:
        ValueError: if *input_data* contains keys not defined on *model*.
    """
    miss = set(input_data) - set(model.model_fields)
    if miss:
        raise ValueError(f"这些字段在 {model.__name__} 中不存在: {miss}")

    selected = {
        field_name: (info.annotation, info.get_default())
        for field_name, info in model.model_fields.items()
        if field_name in input_data
    }
    partial_model: "BaseModel" = pydantic.create_model(model.__name__, **selected)  # type: ignore
    partial_model.model_validate(input_data)

    return input_data
78 |
79 |
80 | def add_fields(model: type["Document"], **field_definitions: Any) -> None:
81 | """动态添加字段
82 |
83 | 来源见: https://github.com/pydantic/pydantic/issues/1937
84 | """
85 | new_fields: dict[str, FieldInfo] = {}
86 | new_annotations: dict[str, type | None] = {}
87 |
88 | for f_name, f_def in field_definitions.items():
89 | if isinstance(f_def, tuple):
90 | try:
91 | f_annotation, f_value = f_def
92 | except ValueError as e:
93 | raise ValueError(
94 | "field definitions should either be a tuple of (
2 |
3 |
4 | 🥭 带有类型提示的 Python 异步 MongoDB 对象文档映射器
5 |
6 |
Mango
7 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
21 | 查看演示 22 | · 23 | 错误报告 24 | · 25 | 功能请求 26 |
27 |28 | 简体中文 29 | · 30 | English 31 | · 32 | 日本語 33 |
34 | 35 | ## 🔖 目录 36 | 37 |[⬆回到顶部]
80 | 81 | ## 🚀 安装 82 | 83 | ### PIP 84 | 85 | ```shell 86 | pip install mango-odm 87 | ``` 88 | ### Poetry 89 | 90 | ```shell 91 | poetry add mango-odm 92 | ``` 93 | 94 |[⬆回到顶部]
95 | 96 | ## 🌟 示例 97 | 98 | ```python 99 | import asyncio 100 | 101 | from mango import Document, EmbeddedDocument, Field, Mango 102 | 103 | 104 | # 嵌入式文档 105 | class Author(EmbeddedDocument): 106 | name: str 107 | profile: str | None = None 108 | 109 | 110 | # Mango 文档模型 111 | class Book(Document): 112 | name: str = Field(primary_key=True) # 将字段设置为主键,如果不显式指定主键,则会自动创建 id 字段作为主键 113 | summary: str | None = None 114 | author: Author # 嵌入文档 115 | price: int = Field(index=True) # 为字段添加索引 116 | 117 | 118 | async def main(): 119 | # 初始化 Mango,它会创建连接并初始化文档模型,你可以传入 db 或者 uri 参数来指定连接 120 | await Mango.init() 121 | 122 | # 像 pydantic 的模型一样使用 123 | book = Book(name="book", author=Author(name="author"), price=10) 124 | # 将它插入到数据库中 125 | await book.save() 126 | 127 | # Mango 提供了丰富的查询语言,允许您使用 Python 表达式来查询 128 | if book := await Book.find(Book.price <= 20, Book.author.name == "author").get(): 129 | # 更新文档的 summary 字段 130 | book.summary = "summary" 131 | await book.update() 132 | 133 | 134 | if __name__ == "__main__": 135 | asyncio.run(main()) 136 | 137 | ``` 138 | 139 |[⬆回到顶部]
140 | 141 | ## 🤝 贡献 142 | 143 | 想为这个项目做出一份贡献吗?[点击这里]()阅读并了解如何贡献。 144 | 145 | ### 🎉 鸣谢 146 | 147 | 感谢以下开发者对该项目做出的贡献: 148 | 149 | 150 |[⬆回到顶部]
154 | 155 | ## 💖 支持 156 | 157 | 喜欢这个项目?请点亮 star 并分享它! 158 | 159 |[⬆回到顶部]
160 | 161 | ## 📝 许可证 162 | 163 | 在 `MIT` 许可证下分发。请参阅 [LICENSE](./LICENSE) 以获取更多信息。 164 | 165 |[⬆回到顶部]
-------------------------------------------------------------------------------- /mango/drive.py: -------------------------------------------------------------------------------- 1 | import os 2 | from collections.abc import Iterator 3 | from typing import Any, ClassVar 4 | 5 | from motor.motor_asyncio import ( 6 | AsyncIOMotorClient, 7 | AsyncIOMotorCollection, 8 | AsyncIOMotorDatabase, 9 | ) 10 | from typing_extensions import Self 11 | 12 | DEFAULT_CONNECT_URI = os.getenv("MANGO_URI") or "mongodb://localhost:27017" 13 | DEFAULT_DATABASE_NAME = "test" 14 | 15 | 16 | class Collection: 17 | def __init__(self, collection: AsyncIOMotorCollection) -> None: 18 | self.collection = collection 19 | 20 | def __getattr__(self, name: str) -> Any: 21 | return getattr(self.collection, name) 22 | 23 | def __repr__(self) -> str: 24 | return f"Collection(name={self.name}, db={self.full_name.split('.')[0]})" 25 | 26 | @property 27 | def name(self) -> str: 28 | return self.collection.name 29 | 30 | @property 31 | def full_name(self) -> str: 32 | return self.collection.full_name 33 | 34 | 35 | class Database: 36 | def __init__(self, db: AsyncIOMotorDatabase) -> None: 37 | self.db = db 38 | self.collections: dict[str, Collection] = {} 39 | 40 | def __getattr__(self, name: str) -> Collection: 41 | try: 42 | return self.collections[name] 43 | except KeyError: 44 | collection = self.db.get_collection(name) 45 | self.collections[name] = Collection(collection) 46 | return self.collections[name] 47 | 48 | def __getitem__(self, name: str) -> Collection: 49 | return self.__getattr__(name) 50 | 51 | def __iter__(self) -> Iterator[Collection]: 52 | return iter(self.collections.values()) 53 | 54 | def __repr__(self) -> str: 55 | return ( 56 | f"Database(name={self.name}, " 57 | f"host={self.client.HOST}, " 58 | f"port={self.client.PORT})" 59 | ) 60 | 61 | async def drop_collection(self, collection: str | Collection) -> None: 62 | """删除集合""" 63 | name = collection if isinstance(collection, str) 
else collection.name 64 | await self.db.drop_collection(name) 65 | self.collections.pop(name, None) 66 | 67 | @property 68 | def name(self) -> str: 69 | return self.db.name 70 | 71 | @property 72 | def client(self) -> AsyncIOMotorClient: 73 | return self.db.client 74 | 75 | 76 | class Client: 77 | _clients: ClassVar[set[Self]] = set() 78 | 79 | def __init__(self, uri: str = DEFAULT_CONNECT_URI, **kwargs: Any) -> None: 80 | kwargs.setdefault("host", uri) 81 | self.client = AsyncIOMotorClient(**kwargs) 82 | self.databases: dict[str, Database] = {} 83 | self.__class__._clients.add(self) 84 | 85 | def __getattr__(self, name: str) -> Database: 86 | try: 87 | return self.databases[name] 88 | except KeyError: 89 | db = self.client.get_database(name) 90 | self.databases[name] = Database(db) 91 | return self.databases[name] 92 | 93 | def __getitem__(self, name: str) -> Database: 94 | return self.__getattr__(name) 95 | 96 | def __iter__(self) -> Iterator[Database]: 97 | return iter(self.databases.values()) 98 | 99 | def __repr__(self) -> str: 100 | return f"Client(host={self.host}, port={self.port})" 101 | 102 | def close(self) -> None: 103 | """关闭连接""" 104 | self.client.close() 105 | self.__class__._clients.remove(self) 106 | 107 | async def drop_database(self, database: str | Database) -> None: 108 | """删除数据库""" 109 | name = database if isinstance(database, str) else database.name 110 | await self.client.drop_database(name) 111 | self.databases.pop(name, None) 112 | 113 | @classmethod 114 | def get_database(cls, db: Database | str | None = None) -> Database: 115 | """获取数据库""" 116 | try: 117 | client = next(iter(cls._clients)) 118 | except StopIteration: 119 | client = cls() 120 | 121 | if isinstance(db, Database): 122 | return db 123 | return client[db] if db else client.default_database 124 | 125 | @property 126 | def default_database(self) -> Database: 127 | """默认为首次调用的数据库, 如果不存在, 则创建 test 数据库""" 128 | try: 129 | return next(iter(self.databases.values())) 130 | except 
StopIteration: 131 | ddb = self.client.get_default_database(DEFAULT_DATABASE_NAME) 132 | return self[ddb.name] 133 | 134 | @property 135 | def host(self) -> str: 136 | return self.client.HOST 137 | 138 | @property 139 | def port(self) -> int: 140 | return self.client.PORT 141 | 142 | @property 143 | def address(self) -> tuple[str, int] | None: 144 | return self.client.address 145 | -------------------------------------------------------------------------------- /mango/fields.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Callable 2 | from typing import Any, AnyStr, Literal 3 | 4 | from bson import ObjectId 5 | from pydantic import GetJsonSchemaHandler 6 | from pydantic.aliases import AliasChoices, AliasPath 7 | from pydantic.config import JsonDict 8 | from pydantic.fields import FieldInfo as PDFieldInfo 9 | from pydantic.fields import _FromFieldInfoInputs 10 | from pydantic.json_schema import JsonSchemaValue 11 | from pydantic.types import Discriminator 12 | from pydantic_core import CoreSchema, PydanticUndefined, core_schema 13 | from typing_extensions import Self, Unpack 14 | 15 | from mango.index import Index, IndexType 16 | 17 | 18 | class ObjectIdField(ObjectId): 19 | @classmethod 20 | def __get_pydantic_core_schema__( 21 | cls, 22 | _source_type: Any, 23 | _handler: Callable[[Any], core_schema.CoreSchema], 24 | ) -> CoreSchema: 25 | from_any_str_schema = core_schema.chain_schema( 26 | [ 27 | core_schema.union_schema( 28 | [ 29 | core_schema.str_schema(), 30 | core_schema.bytes_schema(), 31 | ] 32 | ), 33 | core_schema.no_info_plain_validator_function(cls.validate), 34 | ] 35 | ) 36 | 37 | return core_schema.json_or_python_schema( 38 | json_schema=from_any_str_schema, 39 | python_schema=core_schema.union_schema( 40 | [ 41 | core_schema.is_instance_schema(ObjectId), 42 | from_any_str_schema, 43 | ], 44 | ), 45 | serialization=core_schema.plain_serializer_function_ser_schema( 46 | str, 
when_used="json" 47 | ), 48 | ) 49 | 50 | @classmethod 51 | def validate(cls, v: AnyStr) -> Self: 52 | if cls.is_valid(v): 53 | return cls(v) 54 | raise ValueError("无效的 ObjectId") 55 | 56 | @classmethod 57 | def __get_pydantic_json_schema__( 58 | cls, _core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler 59 | ) -> JsonSchemaValue: 60 | return handler(core_schema.str_schema()) 61 | 62 | 63 | class FromFieldInfoInputs(_FromFieldInfoInputs, total=False): 64 | primary_key: bool 65 | index: bool | IndexType | Index | None 66 | expire: int | None 67 | unique: bool 68 | 69 | 70 | class FieldInfoInputs(FromFieldInfoInputs, total=False): 71 | default: Any 72 | 73 | 74 | class FieldInfo(PDFieldInfo): 75 | def __init__(self, **kwargs: Unpack[FieldInfoInputs]) -> None: 76 | self.primary_key: bool = kwargs.pop("primary_key", False) 77 | self.index: bool | IndexType | Index | None = kwargs.pop("index", None) 78 | self.expire: int | None = kwargs.pop("expire", None) 79 | self.unique: bool = kwargs.pop("unique", False) 80 | super().__init__(**kwargs) 81 | 82 | @staticmethod 83 | def from_field( 84 | default: Any = PydanticUndefined, **kwargs: Unpack[FromFieldInfoInputs] 85 | ) -> "FieldInfo": 86 | if "annotation" in kwargs: 87 | raise TypeError('"annotation" is not permitted as a Field keyword argument') 88 | return FieldInfo(default=default, **kwargs) 89 | 90 | 91 | def Field( 92 | default: Any = PydanticUndefined, 93 | *, 94 | default_factory: Callable[[], Any] | None = None, 95 | alias: str | None = None, 96 | alias_priority: int | None = None, 97 | validation_alias: str | AliasPath | AliasChoices | None = None, 98 | serialization_alias: str | None = None, 99 | title: str | None = None, 100 | description: str | None = None, 101 | examples: list[Any] | None = None, 102 | exclude: bool | None = None, 103 | discriminator: str | Discriminator | None = None, 104 | json_schema_extra: JsonDict | Callable[[JsonDict], None] | None = None, 105 | frozen: bool | None = None, 
106 | validate_default: bool | None = None, 107 | repr: bool = True, 108 | init: bool | None = None, 109 | init_var: bool | None = None, 110 | kw_only: bool | None = None, 111 | pattern: str | None = None, 112 | strict: bool | None = None, 113 | gt: float | None = None, 114 | ge: float | None = None, 115 | lt: float | None = None, 116 | le: float | None = None, 117 | multiple_of: float | None = None, 118 | allow_inf_nan: bool | None = None, 119 | max_digits: int | None = None, 120 | decimal_places: int | None = None, 121 | min_length: int | None = None, 122 | max_length: int | None = None, 123 | union_mode: Literal["smart", "left_to_right"] | None = None, 124 | primary_key: bool = False, 125 | index: bool | IndexType | Index | None = None, 126 | expire: int | None = None, 127 | unique: bool = False, 128 | ) -> Any: 129 | """ 130 | primary_key: 主键 131 | index: 索引 132 | expire: 到期时间, int 表示创建后多少秒后到期, datetime 表示到期时间 133 | unique: 唯一索引 134 | """ 135 | return FieldInfo.from_field( 136 | default, 137 | default_factory=default_factory, 138 | alias=alias, 139 | alias_priority=alias_priority, 140 | validation_alias=validation_alias, 141 | serialization_alias=serialization_alias, 142 | title=title, 143 | description=description, 144 | examples=examples, 145 | exclude=exclude, 146 | discriminator=discriminator, 147 | json_schema_extra=json_schema_extra, 148 | frozen=primary_key or frozen, 149 | pattern=pattern, 150 | validate_default=validate_default, 151 | repr=repr, 152 | init=init, 153 | init_var=init_var, 154 | kw_only=kw_only, 155 | strict=strict, 156 | gt=gt, 157 | ge=ge, 158 | lt=lt, 159 | le=le, 160 | multiple_of=multiple_of, 161 | min_length=min_length, 162 | max_length=max_length, 163 | allow_inf_nan=allow_inf_nan, 164 | max_digits=max_digits, 165 | decimal_places=decimal_places, 166 | union_mode=union_mode, 167 | primary_key=primary_key, 168 | index=index, 169 | expire=expire, 170 | unique=unique, 171 | ) 172 | 
-------------------------------------------------------------------------------- /mango/expression.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from enum import Enum, auto 3 | from typing import TYPE_CHECKING, Any 4 | 5 | from pydantic._internal import _repr 6 | from typing_extensions import Self 7 | 8 | from mango.fields import FieldInfo 9 | 10 | if TYPE_CHECKING: # pragma: no cover 11 | from mango.models import Document 12 | 13 | 14 | class Operators(Enum): 15 | EQ = auto() 16 | """等于""" 17 | NE = auto() 18 | """不等于""" 19 | LT = auto() 20 | """小于""" 21 | LTE = auto() 22 | """小于或等于""" 23 | GT = auto() 24 | """大于""" 25 | GTE = auto() 26 | """大于或等于""" 27 | OR = auto() 28 | """逻辑或""" 29 | AND = auto() 30 | """逻辑与""" 31 | NOR = auto() 32 | """逻辑非或""" 33 | IN = auto() 34 | """包含在""" 35 | NIN = auto() 36 | """不包含在""" 37 | REGEX = auto() 38 | """正则匹配""" 39 | 40 | def __str__(self) -> str: 41 | return f"${self.name.lower()}" 42 | 43 | 44 | class ExpressionField: 45 | def __init__( 46 | self, name: str, field: FieldInfo, parents: list[tuple[str, "Document"]] 47 | ) -> None: 48 | self.__name = name 49 | self.__field = field 50 | self.__parents = parents 51 | 52 | def __eq__(self, other: object) -> "Expression": 53 | return OPR(self).eq(other) 54 | 55 | def __ne__(self, other: object) -> "Expression": 56 | return OPR(self).ne(other) 57 | 58 | def __lt__(self, other: Any) -> "Expression": 59 | return OPR(self).lt(other) 60 | 61 | def __le__(self, other: Any) -> "Expression": 62 | return OPR(self).lte(other) 63 | 64 | def __gt__(self, other: Any) -> "Expression": 65 | return OPR(self).gt(other) 66 | 67 | def __ge__(self, other: Any) -> "Expression": 68 | return OPR(self).gte(other) 69 | 70 | def __hash__(self) -> int: 71 | return super().__hash__() 72 | 73 | def __repr__(self) -> str: 74 | annotation = _repr.PlainRepr(_repr.display_as_type(self.__field.annotation)) 75 | return 
f"ExpressionField(name={self!s}, type={annotation})" 76 | 77 | def __str__(self) -> str: 78 | names = [p[0] for p in self.__parents] 79 | if isinstance(self.__field, FieldInfo) and self.__field.primary_key: 80 | names.append("_id") 81 | else: 82 | names.append(self.__name) 83 | return ".".join(names) 84 | 85 | def __getattr__(self, name: str) -> Any: 86 | """内嵌查询反向查找分配父级""" 87 | attr = object.__getattribute__(self.__field.annotation, name) 88 | if isinstance(attr, self.__class__) and self.__field.annotation: 89 | new_parent = (self.__name, self.__field.annotation) 90 | if new_parent not in attr.__parents: 91 | attr.__parents.append(new_parent) 92 | if new_parents := list(set(self.__parents) - set(attr.__parents)): 93 | attr.__parents = new_parents + attr.__parents 94 | return attr 95 | 96 | 97 | @dataclass 98 | class Expression: 99 | key: ExpressionField | None 100 | operator: Operators 101 | value: Any 102 | 103 | def __or__(self, other: Self) -> "Expression": 104 | return OPR.or_(self, other) 105 | 106 | def __and__(self, other: Self) -> "Expression": 107 | return OPR.and_(self, other) 108 | 109 | def __repr__(self) -> str: 110 | return f"Expression({self.struct()})" 111 | 112 | def struct(self) -> dict[str, Any]: 113 | """转换为 MongoDB 查询结构""" 114 | value = {str(self.operator): self.unpack(self.value)} 115 | return {str(self.key): value} if self.key else value 116 | 117 | def unpack(self, value: Any) -> Any: 118 | # TODO: 将嵌入文档模型转换为 mongodb 文档形式 119 | return value 120 | 121 | def merge(self, operator: Operators) -> Any: 122 | return self.value if self.operator is operator else [self] 123 | 124 | 125 | class OPR: 126 | def __init__(self, key: Any) -> None: 127 | if not isinstance(key, ExpressionField): 128 | raise TypeError("必须是有效的字段") 129 | self.key = key 130 | 131 | def eq(self, value: Any) -> Expression: 132 | return Expression(self.key, Operators.EQ, value) 133 | 134 | def ne(self, value: Any) -> Expression: 135 | return Expression(self.key, Operators.NE, 
value) 136 | 137 | def lt(self, value: Any) -> Expression: 138 | return Expression(self.key, Operators.LT, value) 139 | 140 | def lte(self, value: Any) -> Expression: 141 | return Expression(self.key, Operators.LTE, value) 142 | 143 | def gt(self, value: Any) -> Expression: 144 | return Expression(self.key, Operators.GT, value) 145 | 146 | def gte(self, value: Any) -> Expression: 147 | return Expression(self.key, Operators.GTE, value) 148 | 149 | def in_(self, *values: Any) -> Expression: 150 | """包含在""" 151 | return Expression(self.key, Operators.IN, values) 152 | 153 | def nin(self, *values: Any) -> Expression: 154 | """不包含在""" 155 | return Expression(self.key, Operators.NIN, values) 156 | 157 | def regex(self, values: str) -> Expression: 158 | """正则匹配""" 159 | return Expression(self.key, Operators.REGEX, values) 160 | 161 | @classmethod 162 | def or_(cls, *expressions: Expression | bool) -> Expression: 163 | merge = cls._merge(Operators.OR, expressions) 164 | return Expression(None, *merge) 165 | 166 | @classmethod 167 | def and_(cls, *expressions: Expression | bool) -> Expression: 168 | merge = cls._merge(Operators.AND, expressions) 169 | return Expression(None, *merge) 170 | 171 | @classmethod 172 | def nor(cls, *expressions: Expression | bool) -> Expression: 173 | """既不也不""" 174 | merge = cls._merge(Operators.NOR, expressions) 175 | return Expression(None, *merge) 176 | 177 | @classmethod 178 | def _merge( 179 | cls, operator: Operators, expressions: tuple[Expression | bool, ...] 
180 | ) -> tuple[Operators, list[Expression]]: 181 | merge_expr: list[Expression] = [] 182 | for expression in expressions: 183 | if not isinstance(expression, Expression): 184 | raise TypeError("必须是有效的表达式") 185 | 186 | if expression.operator is operator: 187 | merge_expr.extend(expression.value) 188 | else: 189 | merge_expr.append(expression) 190 | 191 | return operator, merge_expr 192 | -------------------------------------------------------------------------------- /mango/result.py: -------------------------------------------------------------------------------- 1 | from collections.abc import AsyncGenerator, Generator, Mapping 2 | from typing import TYPE_CHECKING, Any, Generic, TypeAlias, TypeVar 3 | 4 | from motor.motor_asyncio import AsyncIOMotorCursor, AsyncIOMotorLatentCommandCursor 5 | from pydantic import BaseModel 6 | 7 | from mango.expression import Expression, ExpressionField 8 | from mango.index import Order 9 | from mango.utils import any_check, is_sequence, validate_fields 10 | 11 | if TYPE_CHECKING: # pragma: no cover 12 | from pymongo.results import DeleteResult 13 | 14 | from mango.models import Document 15 | 16 | T_Model = TypeVar("T_Model", bound="Document") 17 | 18 | KeyField: TypeAlias = str | ExpressionField 19 | 20 | FindMapping: TypeAlias = Mapping[KeyField, Any] 21 | 22 | DirectionType: TypeAlias = Order 23 | 24 | SortType: TypeAlias = tuple[str, DirectionType] 25 | 26 | 27 | class FindOptions(BaseModel): 28 | limit: int = 0 29 | skip: int = 0 30 | sort: list[SortType] = [] 31 | 32 | def kwdict(self, *exclude: str) -> dict[str, Any]: 33 | return self.model_dump(exclude=set(exclude), exclude_defaults=True) 34 | 35 | 36 | class FindResult(Generic[T_Model]): 37 | def __init__( 38 | self, 39 | model: type[T_Model], 40 | *filter: FindMapping | Expression, 41 | ) -> None: 42 | self.model = model 43 | self.collection = model.__collection__ 44 | self._filter = filter 45 | self.options = FindOptions() 46 | 47 | def __await__(self) -> 
Generator[None, None, list[T_Model]]: 48 | """`await` : 等待时,将返回获取的模型列表""" 49 | documents = yield from self.cursor.to_list(length=None).__await__() 50 | instances: list[T_Model] = [] 51 | for document in documents: 52 | instances.append(self.model.from_doc(document)) 53 | yield 54 | return instances 55 | 56 | async def __aiter__(self) -> AsyncGenerator[T_Model, None]: 57 | """`async for`: 异步迭代查询结果""" 58 | async for document in self.cursor: # type: ignore 59 | yield self.model.from_doc(document) 60 | 61 | @property 62 | def cursor(self) -> AsyncIOMotorCursor: 63 | return self.collection.find(self.filter, **self.options.kwdict()) 64 | 65 | @property 66 | def filter(self) -> dict[str, Any]: 67 | """查询过滤条件""" 68 | compiled: dict[str, Any] = {} 69 | for condition in self._filter: 70 | if isinstance(condition, Mapping): 71 | condition = dict(condition) # noqa: PLW2901 72 | elif isinstance(condition, Expression): 73 | condition = condition.struct() # noqa: PLW2901 74 | else: 75 | raise TypeError("查询过滤条件不正确, 应为映射或表达式") 76 | compiled |= self._compile(condition) 77 | return compiled 78 | 79 | def _compile( 80 | self, 81 | source: Mapping[KeyField, Any] | Mapping[str, Any], 82 | ) -> dict[str, Any]: 83 | compiled: dict[str, Any] = {} 84 | 85 | for key, value in source.items(): 86 | key = str(key) # noqa: PLW2901 87 | if isinstance(value, Expression): 88 | compiled[key] = value.struct() 89 | elif isinstance(value, Mapping): 90 | compiled[key] = self._compile(value) 91 | elif is_sequence(value): 92 | compiled[key] = [] 93 | for i in value: 94 | if isinstance(i, Expression): 95 | i = i.struct() # noqa: PLW2901 96 | if isinstance(i, Mapping): 97 | i = self._compile(i) # noqa: PLW2901 98 | compiled[key].append(i) 99 | else: 100 | compiled[key] = value 101 | 102 | return compiled 103 | 104 | def limit(self, limit: int = 0) -> "FindResult[T_Model]": 105 | """限制查询条件返回结果的数量""" 106 | self.options.limit += limit 107 | return self 108 | 109 | def skip(self, skip: int = 0) -> 
"FindResult[T_Model]": 110 | """跳过指定数目的文档""" 111 | self.options.skip += skip 112 | return self 113 | 114 | def sort(self, *orders: Any) -> "FindResult[T_Model]": 115 | """对查询文档流进行排序""" 116 | if not (len(orders) != 2 or any_check(orders, is_sequence)): # noqa: PLR2004 117 | orders = (orders,) 118 | for order in orders: 119 | direction = Order.ASC 120 | if is_sequence(order): 121 | key, direction = order 122 | else: 123 | key = order 124 | 125 | try: 126 | key, direction = str(key), Order(direction) 127 | except ValueError as e: 128 | raise TypeError( 129 | "键应为字符串或字段, 排序方向应为 Order 枚举成员" 130 | ) from e 131 | else: 132 | self.options.sort.append((key, direction)) 133 | 134 | return self 135 | 136 | def asc(self, *keys: Any) -> "FindResult[T_Model]": 137 | """升序排列""" 138 | self.options.sort.extend(self._sort_order(*keys)) 139 | return self 140 | 141 | def desc(self, *keys: Any) -> "FindResult[T_Model]": 142 | """降序排列""" 143 | self.options.sort.extend(self._sort_order(*keys, reverse=True)) 144 | return self 145 | 146 | def _sort_order( 147 | self, *keys: Any, reverse: bool = False 148 | ) -> Generator[SortType, None, None]: 149 | direction: Order = Order.DESC if reverse else Order.ASC 150 | for key in keys: 151 | yield str(key), direction 152 | 153 | async def count(self) -> int: 154 | """获得符合条件的文档总数""" 155 | return await self.collection.count_documents( 156 | self.filter, **self.options.kwdict("sort") 157 | ) 158 | 159 | async def get(self) -> T_Model | None: 160 | """ 161 | 从数据库中获取单个文档。 162 | 返回单个文档,如果没有找到匹配的文档,返回“None”。 163 | """ 164 | if document := await self.collection.find_one(self.filter): 165 | return self.model.from_doc(document) 166 | return None 167 | 168 | async def delete(self) -> int: 169 | """删除符合条件的文档""" 170 | result: DeleteResult = await self.collection.delete_many(self.filter) 171 | return result.deleted_count 172 | 173 | async def update(self, **kwargs: Any) -> None: 174 | """使用提供的信息更新查找到的文档""" 175 | values = validate_fields(self.model, kwargs) 176 | 
await self.collection.update_many(self.filter, {"$set": values}) 177 | 178 | 179 | class AggregateResult: 180 | def __init__(self, cursor: AsyncIOMotorLatentCommandCursor) -> None: 181 | self.cursor = cursor 182 | 183 | def __await__(self) -> Generator[None, None, list[Mapping[str, Any]]]: 184 | """ 185 | `await` : 等待时,将返回聚合管道的结果文档列表 186 | """ 187 | return (yield from self.cursor.to_list(length=None).__await__()) 188 | 189 | async def __aiter__(self) -> AsyncGenerator[Mapping[str, Any], None]: 190 | """`async for`: 异步迭代聚合管道的结果文档""" 191 | async for document in self.cursor: # type: ignore 192 | yield document 193 | -------------------------------------------------------------------------------- /mango/models.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | from collections.abc import Mapping, MutableMapping, Sequence 3 | from functools import reduce 4 | from typing import TYPE_CHECKING, Any, ClassVar 5 | 6 | import bson 7 | from bson import ObjectId 8 | from pydantic import BaseModel, ConfigDict 9 | from pydantic._internal._model_construction import ModelMetaclass 10 | from typing_extensions import Self, dataclass_transform 11 | 12 | from mango.encoder import Encoder 13 | from mango.expression import Expression, ExpressionField, Operators 14 | from mango.fields import Field, FieldInfo, ObjectIdField 15 | from mango.meta import MetaConfig 16 | from mango.result import AggregateResult, FindMapping, FindResult 17 | from mango.source import Mango 18 | from mango.stage import Pipeline 19 | from mango.utils import add_fields, all_check, validate_fields 20 | 21 | if TYPE_CHECKING: 22 | from bson.codec_options import CodecOptions 23 | from pymongo.results import DeleteResult, UpdateResult 24 | 25 | from mango.drive import Collection, Database 26 | 27 | operators = tuple(str(i) for i in Operators) 28 | 29 | 30 | def is_need_default_pk( 31 | bases: tuple[type[Any], ...], annotate: dict[str, Any] | None = None 32 | ) 
-> bool: 33 | # 未定义任何字段 34 | if not annotate: 35 | return False 36 | 37 | # 存在 id 字段但未定义主键 38 | if "id" in annotate: 39 | return False 40 | 41 | # 存在 id 字段但未定义主键,且其被继承 42 | return not any(getattr(base, "id", None) for base in bases) 43 | 44 | 45 | def set_default_pk(model: type["Document"]) -> None: 46 | add_fields( 47 | model, 48 | id=( 49 | ObjectIdField, 50 | { 51 | "default_factory": ObjectId, 52 | "primary_key": True, 53 | "frozen": True, 54 | "init": False, 55 | }, 56 | ), 57 | ) 58 | model.__primary_key__ = "id" 59 | 60 | 61 | def flat_filter(data: Mapping[str, Any]) -> dict[str, Any]: 62 | flatted = {} 63 | for key, value in data.items(): 64 | if key.startswith(operators): 65 | flatted |= flat_filter(reduce(lambda x, y: x | y, value)) 66 | elif "." in key: 67 | parent, child = key.split(".", maxsplit=1) 68 | flatted[parent] = flat_filter({child: value}) 69 | else: 70 | for operator in operators: 71 | if isinstance(value, dict) and operator in value: 72 | flatted[key] = value[operator] 73 | return flatted 74 | 75 | 76 | def merge_map(data: MutableMapping[Any, Any], into: Mapping[Any, Any]) -> None: 77 | for k, v in into.items(): 78 | k = str(k) # noqa: PLW2901 79 | if isinstance(data.get(k), dict) and isinstance(v, dict | EmbeddedDocument): 80 | merge_map(data[k], v if isinstance(v, dict) else v.model_dump()) 81 | else: 82 | data[k] = v 83 | 84 | 85 | config_keys = set(MetaConfig.__annotations__.keys()) 86 | 87 | 88 | def merge_config( 89 | bases: tuple[type[Any], ...], attrs: dict[str, Any], kwargs: dict[str, Any] 90 | ) -> MetaConfig: 91 | config = MetaConfig() 92 | 93 | for base in bases: 94 | if cfg := getattr(base, "meta_config", None): 95 | config.update(cfg.copy()) 96 | 97 | config.update(attrs.get("meta_config", MetaConfig())) 98 | 99 | for k in list(kwargs.keys()): 100 | if k in config_keys: 101 | config[k] = kwargs.pop(k) 102 | if k == "db": 103 | config["database"] = kwargs.pop(k) 104 | 105 | return config 106 | 107 | 108 | 
@dataclass_transform(kw_only_default=True, field_specifiers=(Field, FieldInfo))
class MetaDocument(ModelMetaclass):
    """Metaclass for `Document`: merges Meta configuration, installs
    ExpressionField accessors, resolves the primary key and registers the
    model with Mango."""

    def __new__(
        cls,
        cname: str,
        bases: tuple[type[Any], ...],
        attrs: dict[str, Any],
        **kwargs: Any,
    ) -> Any:
        # Skip the abstract base class (`Document` itself).
        if bases == (BaseModel,):
            return super().__new__(cls, cname, bases, attrs, **kwargs)

        # Merge Meta configuration from bases, class body and class kwargs.
        attrs["meta_config"] = merge_config(bases, attrs, kwargs)

        # Build the BSON encoder (codec options) for this model.
        attrs["__encoder__"] = Encoder.create(attrs["meta_config"].get("bson_encoders"))

        scls = super().__new__(cls, cname, bases, attrs, **kwargs)

        # Shadow declared fields with ExpressionField so class-level access
        # (e.g. `User.name == "x"`) builds query expressions.
        annotations = attrs.get("__annotations__", {})
        for fname in annotations:
            field = scls.model_fields[fname]
            setattr(scls, fname, ExpressionField(fname, field, []))

        # A document may declare at most one primary-key field.
        pk_fields = {
            fname: field
            for fname, field in scls.model_fields.items()
            if isinstance(field, FieldInfo) and field.primary_key
        }
        if len(pk_fields) > 1:
            raise ValueError(
                f"文档的主键应唯一, 当前有主键字段: {', '.join(pk_fields.keys())}"
            )

        # Record the explicitly declared primary key (alias wins over name).
        if len(pk_fields) == 1:
            # NOTE(review): "pk_filed" is a typo for "pk_field" (local name only).
            pk_name, pk_filed = next(iter(pk_fields.items()))
            scls.__primary_key__ = pk_filed.alias or pk_name

        # Inject a default ObjectId primary key when none was declared.
        if not pk_fields and is_need_default_pk(bases, annotations):
            set_default_pk(scls)

        # Register the model so Mango can bind it to a database/collection.
        Mango.register_model(scls)

        return scls


@dataclass_transform(kw_only_default=True, field_specifiers=(Field, FieldInfo))
class MetaEmbeddedDocument(ModelMetaclass):
    """Metaclass for `EmbeddedDocument`: installs ExpressionField accessors
    and forbids primary-key fields on embedded documents."""

    def __new__(
        cls,
        name: str,
        bases: tuple[type[Any], ...],
        attrs: dict[str, Any],
        **kwargs: Any,
    ) -> Any:
        scls = super().__new__(cls, name, bases, attrs, **kwargs)
        for fname, field in scls.model_fields.items():
            setattr(scls, fname, ExpressionField(fname, field, []))
            # Embedded documents live inside a parent document and cannot
            # carry their own primary key.
            if isinstance(field, FieldInfo) and field.primary_key:
                raise ValueError("内嵌文档不可设置主键")
        return scls


class Document(BaseModel, metaclass=MetaDocument):
    """Base class for MongoDB documents with async CRUD helpers."""

    if TYPE_CHECKING:  # pragma: no cover
        id: ClassVar[ObjectId]
        model_fields: ClassVar[dict[str, FieldInfo]]
        __encoder__: ClassVar[CodecOptions]
        __collection__: ClassVar[Collection]
        __primary_key__: ClassVar[str]

        def __init_subclass__(
            cls,
            *,
            name: str | None = None,
            db: Database | str | None = None,
            **kwargs: Any,
        ) -> None: ...

    meta_config: ClassVar[MetaConfig] = MetaConfig()
    model_config = ConfigDict(validate_assignment=True)

    @property
    def pk(self) -> Any:
        """Primary-key value of this document."""
        return getattr(self, self.__primary_key__)

    async def insert(self) -> Self:
        """Insert this document into its collection and return it."""
        await self.__collection__.insert_one(self.doc())
        return self

    async def update(self, **kwargs: Any) -> bool:
        """Apply keyword updates (validated) to the instance, persist all
        non-pk fields with `$set`, and return True if a document changed."""
        if kwargs:
            values = validate_fields(self.__class__, kwargs)
            for field, value in values.items():
                setattr(self, field, value)
        result: UpdateResult = await self.__collection__.update_one(
            {"_id": self.pk}, {"$set": self.doc(exclude={self.__primary_key__})}
        )
        return bool(result.modified_count)

    async def save(self, **kwargs: Any) -> Self:
        """Save the document: update it if it already exists, insert otherwise."""
        existing_doc = await self.__collection__.find_one({"_id": self.pk})
        if existing_doc:
            await self.update(**kwargs)
        else:
            await self.insert()
        return self

    async def delete(self) -> bool:
        """Delete this document by primary key; True if one was deleted."""
        result: DeleteResult = await self.__collection__.delete_one({"_id": self.pk})
        return bool(result.deleted_count)

    def doc(self, **kwargs: Any) -> dict[str, Any]:
        """Dump the model to a MongoDB document.

        The primary-key field is renamed to `_id` (unless excluded), and the
        result is round-tripped through BSON to apply the model's encoders.
        """
        if by_alias := self.meta_config.get("by_alias"):
            kwargs.setdefault("by_alias", by_alias)
        data = self.model_dump(**kwargs)
        pk = self.__primary_key__
        exclude = kwargs.get("exclude")
        if not (exclude and pk in exclude):
            data["_id"] = data.pop(pk)
        return bson.decode(bson.encode(data, codec_options=self.__encoder__))

    @classmethod
    def from_doc(cls, document: Mapping[str, Any]) -> Self:
        """Build a model instance from a MongoDB document (`_id` -> pk field)."""
        doc = dict(document)
        with contextlib.suppress(KeyError):
            doc[cls.__primary_key__] = doc.pop("_id")
        return cls(**doc)

    @classmethod
    async def save_all(cls, *documents: Self) -> None:
        """Bulk-insert all given documents."""
        await cls.__collection__.insert_many(doc.doc() for doc in documents)

    @classmethod
    def aggregate(
        cls, pipeline: Pipeline | Sequence[Mapping[str, Any]], *args: Any, **kwargs: Any
    ) -> AggregateResult:
        """Run an aggregation pipeline and return an awaitable/iterable result."""
        cursor = cls.__collection__.aggregate(pipeline, *args, **kwargs)
        return AggregateResult(cursor)

    @classmethod
    def find(
        cls,
        *args: FindMapping | Expression | bool,
    ) -> FindResult[Self]:
        """Query documents by filter expressions or mappings.

        Raises TypeError when any argument is neither an Expression nor a
        Mapping.
        """
        if all_check(args, Expression | Mapping):
            return FindResult(cls, *args)  # type: ignore
        raise TypeError("查询表达式类型不正确")

    @classmethod
    async def get(cls, _id: Any) -> Self | None:
        """Fetch a single document by primary-key value, or None."""
        return await cls.find({"_id": _id}).get()

    @classmethod
    async def get_or_create(
        cls,
        *args: FindMapping | Expression | bool,
        defaults: FindMapping | Self | None = None,
    ) -> Self:
        """Fetch the first document matching the filter, or create one from the
        flattened filter merged with `defaults` (mapping or Document)."""
        result: FindResult[Self] = FindResult(cls, *args)  # type: ignore
        if model := await result.get():
            return model
        default = defaults.doc() if isinstance(defaults, Document) else defaults or {}
        data = flat_filter(result.filter)
        merge_map(data, default)
        model = cls.from_doc(data)
        return await model.save()
class EmbeddedDocument(BaseModel, metaclass=MetaEmbeddedDocument):
    """Base class for documents embedded inside another Document (no pk)."""

    model_config = ConfigDict(validate_assignment=True)

# -------------------- /mango/stage.py --------------------
from collections.abc import Mapping, Sequence
from typing import Any, Literal, TypeAlias

from typing_extensions import Self

from mango.index import Order

SortOrder: TypeAlias = Order | Literal[1, -1]


class Pipeline(list[Mapping[str, Any]]):
    """Fluent builder for MongoDB aggregation pipelines.

    A Pipeline IS a list of stage mappings; every helper appends one stage
    and returns `self` so calls can be chained.
    """

    def __init__(self, *stages: Mapping[str, Any]) -> None:
        super().__init__(stages)

    def stage(self, key: str, value: Any) -> Self:
        """Append one raw stage; a `$` prefix is added to *key* when missing."""
        key = key if key.startswith("$") else f"${key}"
        self.append({key: value})
        return self

    def bucket(
        self,
        group_by: Any,
        boundaries: Sequence[int | float],
        default: str | None = None,
        output: Mapping[str, Mapping[str, Any]] | None = None,
    ) -> Self:
        """Categorize incoming documents into buckets by `group_by` and the
        given ascending `boundaries`.

        group_by: grouping expression (prefix field paths with `$`).
        boundaries: ascending values; each adjacent pair is one bucket
            (inclusive lower, exclusive upper bound).
        default: `_id` of a catch-all bucket for out-of-range documents.
        output: extra output fields besides `_id` (defaults to a count).
        Raises ValueError for fewer than two boundaries or unsorted ones.

        [$bucket (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/bucket/)
        """
        if len(boundaries) < 2:  # noqa: PLR2004
            raise ValueError("必须至少指定两个边界值")
        # BUGFIX: `sorted()` returns a list, so comparing it against a tuple
        # (or any non-list Sequence) was always unequal and rejected valid
        # ascending boundaries; normalize to a list before comparing.
        if sorted(boundaries) != list(boundaries):
            raise ValueError("指定的值必须以升序排列")
        struct = {
            "groupBy": group_by,
            "boundaries": boundaries,
        }
        if default:
            struct["default"] = default
        if output:
            struct["output"] = output
        return self.stage("bucket", struct)

    def count(self, field: str) -> Self:
        """Output a single document containing the count of input documents.

        field: name of the output count field; must be a non-empty string
            that does not start with `$` and contains no `.`.

        [$count (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/count/)
        """
        if not field or field.startswith("$") or "." in field:
            raise ValueError("必须是非空字符串, 不能以 `$` 开头,也不能包含 `.` 字符。")
        return self.stage("count", field)

    def documents(self, expression: Any) -> Self:
        """Emit literal documents from the given expression (must resolve to
        an array of mappings).

        [$documents (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/documents/)
        """
        return self.stage("documents", expression)

    def facet(self, **fields: Self | Sequence[Mapping[str, Any]]) -> Self:
        """Run multiple sub-pipelines over the same input documents; each
        keyword names an output field holding that sub-pipeline's results.

        [$facet (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/facet/)
        """
        return self.stage("facet", fields)

    def fill(
        self,
        output: Mapping[str, Any],
        partition_by: Any = None,
        partition_by_fields: Sequence[str] | None = None,
        sort_by: Mapping[str, SortOrder] | None = None,
    ) -> Self:
        """Fill null/missing field values.

        output: per-field fill specification (field name -> how to fill).
        partition_by: grouping expression (mutually exclusive with
            partition_by_fields, which takes a field list).
        sort_by: within-partition sort order.
        Raises TypeError if partition_by_fields is a bare string.

        [$fill (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/fill/)
        """
        struct: dict[str, Any] = {
            "output": output,
        }
        if partition_by:
            struct["partitionBy"] = partition_by
        elif partition_by_fields:
            if isinstance(partition_by_fields, str):
                raise TypeError("partition_by_fields 不能为字符串")
            struct["partitionByFields"] = partition_by_fields
        if sort_by:
            struct["sortBy"] = sort_by
        return self.stage("fill", struct)

    def group(self, id: Any, **fields: Mapping[str, Any]) -> Self:
        """Group documents by the `id` expression; extra keywords become
        accumulator output fields. Pass `id=None` to accumulate over all
        input documents.

        [$group (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/group/)
        """
        return self.stage("group", {"_id": id, **fields})

    def limit(self, integer: int) -> Self:
        """Pass at most `integer` documents to the next stage.

        [$limit (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/limit/)
        """
        return self.stage("limit", integer)

    def lookup(
        self,
        from_: str | None = None,
        local_field: str | None = None,
        foreign_field: str | None = None,
        let: Mapping[str, Any] | None = None,
        pipeline: Self | Sequence[Mapping[str, Any]] | None = None,
        as_: str | None = None,
    ) -> Self:
        """Left outer join against another collection in the same database.

        from_: foreign collection to join.
        local_field / foreign_field: equality-match fields on each side.
        let: variables usable inside `pipeline`.
        pipeline: sub-pipeline run on the joined collection.
        as_: name of the output array field holding the matches.

        [$lookup (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/lookup/)
        """
        return self.stage(
            "lookup",
            {
                k: v
                for k, v in {
                    "from": from_,
                    "localField": local_field,
                    "foreignField": foreign_field,
                    "let": let,
                    "pipeline": pipeline,
                    "as": as_,
                }.items()
                # BUGFIX: was `if v is None`, which kept only the unset
                # options (as None) and discarded every argument actually
                # passed by the caller — the exact inverse of the intent.
                if v is not None
            },
        )

    def match(self, query: Mapping[str, Any]) -> Self:
        """Pass only documents matching `query` to the next stage.

        [$match (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/match/)
        """
        return self.stage("match", query)

    def merge(
        self,
        collection: str,
        database: str | None = None,
        let: Mapping[str, Any] | None = None,
        on: str | Sequence[str] | None = None,
        matched: Literal["replace", "keepExisting", "merge", "fail"]
        | Self
        | Sequence[Mapping[str, Any]] = "merge",
        not_matched: Literal["insert", "discard", "fail"] = "insert",
    ) -> None:
        """Write the pipeline results into `collection` (created if missing).

        `$merge` must be the final stage, hence no fluent return here.
        database: target database (defaults to the current one).
        let: variables for a `matched` sub-pipeline.
        on: field(s) identifying matching documents in the target.
        matched / not_matched: behavior on (non-)matching documents.

        [$merge (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/merge/)
        """
        struct = {
            "into": {"db": database, "coll": collection} if database else collection,
            "whenMatched": matched,
            "whenNotMatched": not_matched,
        }
        if let:
            struct["let"] = let
        if on:
            struct["on"] = on
        self.stage("merge", struct)

    def out(self, collection: str, database: str | None = None) -> Self:
        """Write the pipeline results to the named collection (and database).

        [$out (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/out/)
        """
        return self.stage(
            "out", {"db": database, "coll": collection} if database else collection
        )

    def project(self, **fields: bool | Mapping[str, Any]) -> Self:
        """Pass only the specified fields onward; booleans include/exclude,
        expressions add or reset fields.

        [$project (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/project/)
        """
        return self.stage("project", fields)

    def redact(self, expression: Any) -> Self:
        """Restrict document content based on the documents themselves; the
        expression must resolve to `$$DESCEND`, `$$PRUNE` or `$$KEEP`.

        [$redact (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/redact/)
        """
        return self.stage("redact", expression)

    def replace(self, replacement: Any) -> Self:
        """Replace each input document (including `_id`) with the document the
        expression resolves to.

        [$replaceWith (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/replaceWith/)
        """
        return self.stage("replaceWith", replacement)

    def sample(self, size: int) -> Self:
        """Randomly select `size` documents from the input.

        [$sample (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/sample/)
        """
        return self.stage("sample", {"size": size})

    def set(self, **fields: Any) -> Self:
        """Add (or overwrite) fields; each keyword maps a field name to an
        aggregation expression.

        [$set (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/set/)
        """
        return self.stage("set", fields)

    def skip(self, integer: int) -> Self:
        """Skip the first `integer` documents and pass the rest onward.

        [$skip (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/skip/)
        """
        return self.stage("skip", integer)

    def sort(self, **fields: SortOrder) -> Self:
        """Sort all input documents; each keyword names a field and its order
        (at most 32 fields).

        [$sort (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/sort/)
        """
        return self.stage("sort", fields)

    def union(
        self,
        collection: str,
        pipeline: Self | Sequence[Mapping[str, Any]] | None = None,
    ) -> Self:
        """Union the results of `pipeline` over `collection` with the current
        result set (duplicates included).

        NOTE: an explicitly empty pipeline falls through to the bare
        collection form — behaviorally equivalent (all documents).

        [$unionWith (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/unionWith/)
        """
        return self.stage(
            "unionWith",
            {"coll": collection, "pipeline": pipeline} if pipeline else collection,
        )

    def unset(self, *fields: str) -> Self:
        """Remove/exclude the named fields from the documents.

        [$unset (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/unset/)
        """
        return self.stage("unset", fields)

    def unwind(
        self, path: str, index_field: str | None = None, preserve_empty: bool = False
    ) -> Self:
        """Deconstruct an array field, emitting one document per element.

        path: array field path (prefix with `$`).
        index_field: optional field name to hold the element's array index.
        preserve_empty: emit the document even when the path is null, missing
            or an empty array.

        [$unwind (aggregation)](https://www.mongodb.com/docs/manual/reference/operator/aggregation/unwind/#mongodb-pipeline-pipe.-unwind)
        """
        struct = {
            "path": path,
            "preserveNullAndEmptyArrays": preserve_empty,
        }
        if index_field:
            struct["includeArrayIndex"] = index_field
        return self.stage("unwind", struct)
-------------------------------------------------------------------------------- /pdm.lock: -------------------------------------------------------------------------------- 1 | # This file is @generated by PDM. 2 | # It is not intended for manual editing. 3 | 4 | [metadata] 5 | groups = ["default", "dev", "test"] 6 | strategy = ["cross_platform"] 7 | lock_version = "4.4.1" 8 | content_hash = "sha256:b190600022a00c1710ad91c386d2c8ba9a8ac1dbd14e27d68148e102d45e7e41" 9 | 10 | [[package]] 11 | name = "allure-pytest" 12 | version = "2.13.5" 13 | summary = "Allure pytest integration" 14 | dependencies = [ 15 | "allure-python-commons==2.13.5", 16 | "pytest>=4.5.0", 17 | ] 18 | files = [ 19 | {file = "allure-pytest-2.13.5.tar.gz", hash = "sha256:0ef8e1790c44a988db6b83c4d4f5e91451e2c4c8ea10601dfa88528d23afcf6e"}, 20 | {file = "allure_pytest-2.13.5-py3-none-any.whl", hash = "sha256:94130bac32964b78058e62cf4b815ad97a5ac82a065e6dd2d43abac2be7640fc"}, 21 | ] 22 | 23 | [[package]] 24 | name = "allure-python-commons" 25 | version = "2.13.5" 26 | requires_python = ">=3.6" 27 | summary = "('Contains the API for end users as well as helper functions and classes to build Allure adapters for Python test frameworks',)" 28 | dependencies = [ 29 | "attrs>=16.0.0", 30 | "pluggy>=0.4.0", 31 | ] 32 | files = [ 33 | {file = "allure-python-commons-2.13.5.tar.gz", hash = "sha256:a232e7955811f988e49a4c1dd6c16cce7e9b81d0ea0422b1e5654d3254e2caf3"}, 34 | {file = "allure_python_commons-2.13.5-py3-none-any.whl", hash = "sha256:8b0e837b6e32d810adec563f49e1d04127a5b6770e0232065b7cb09b9953980d"}, 35 | ] 36 | 37 | [[package]] 38 | name = "annotated-types" 39 | version = "0.6.0" 40 | requires_python = ">=3.8" 41 | summary = "Reusable constraint types to use with typing.Annotated" 42 | files = [ 43 | {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, 44 | {file = "annotated_types-0.6.0.tar.gz", hash = 
"sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, 45 | ] 46 | 47 | [[package]] 48 | name = "anyio" 49 | version = "3.7.1" 50 | requires_python = ">=3.7" 51 | summary = "High level compatibility layer for multiple asynchronous event loop implementations" 52 | dependencies = [ 53 | "exceptiongroup; python_version < \"3.11\"", 54 | "idna>=2.8", 55 | "sniffio>=1.1", 56 | ] 57 | files = [ 58 | {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, 59 | {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, 60 | ] 61 | 62 | [[package]] 63 | name = "attrs" 64 | version = "23.1.0" 65 | requires_python = ">=3.7" 66 | summary = "Classes Without Boilerplate" 67 | files = [ 68 | {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, 69 | {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, 70 | ] 71 | 72 | [[package]] 73 | name = "certifi" 74 | version = "2023.7.22" 75 | requires_python = ">=3.6" 76 | summary = "Python package for providing Mozilla's CA Bundle." 77 | files = [ 78 | {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, 79 | {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, 80 | ] 81 | 82 | [[package]] 83 | name = "cfgv" 84 | version = "3.3.1" 85 | requires_python = ">=3.6.1" 86 | summary = "Validate configuration and produce human readable error messages." 
87 | files = [ 88 | {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, 89 | {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, 90 | ] 91 | 92 | [[package]] 93 | name = "colorama" 94 | version = "0.4.6" 95 | requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 96 | summary = "Cross-platform colored terminal text." 97 | files = [ 98 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 99 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 100 | ] 101 | 102 | [[package]] 103 | name = "coverage" 104 | version = "7.2.7" 105 | requires_python = ">=3.7" 106 | summary = "Code coverage measurement for Python" 107 | files = [ 108 | {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, 109 | {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, 110 | {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, 111 | {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, 112 | {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, 113 | {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, 114 | {file = 
"coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, 115 | {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, 116 | {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, 117 | {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, 118 | {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, 119 | {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, 120 | {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, 121 | {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, 122 | {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, 123 | {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, 124 | {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, 125 | {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, 126 | {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = 
"sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, 127 | {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, 128 | {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, 129 | {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, 130 | {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, 131 | {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, 132 | {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, 133 | {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, 134 | {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, 135 | {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, 136 | {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, 137 | {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, 138 | {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, 139 | {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, 140 | {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, 141 | {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, 142 | {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, 143 | {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, 144 | {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, 145 | {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, 146 | {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, 147 | {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, 148 | {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, 149 | {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, 150 | {file = 
"coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, 151 | {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, 152 | {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, 153 | {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, 154 | {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, 155 | {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, 156 | {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, 157 | {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, 158 | {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, 159 | {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, 160 | {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, 161 | {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, 162 | {file = 
"coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, 163 | {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, 164 | {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, 165 | {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, 166 | {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, 167 | {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, 168 | ] 169 | 170 | [[package]] 171 | name = "coverage" 172 | version = "7.2.7" 173 | extras = ["toml"] 174 | requires_python = ">=3.7" 175 | summary = "Code coverage measurement for Python" 176 | dependencies = [ 177 | "coverage==7.2.7", 178 | "tomli; python_full_version <= \"3.11.0a6\"", 179 | ] 180 | files = [ 181 | {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, 182 | {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, 183 | {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, 184 | {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, 185 | {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, 186 | {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, 187 | {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, 188 | {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, 189 | {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, 190 | {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, 191 | {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, 192 | {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, 193 | {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, 194 | {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, 195 | {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, 196 | {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, 197 | {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, 198 | {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, 199 | {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, 200 | {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, 201 | {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, 202 | {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, 203 | {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, 204 | {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, 205 | {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, 206 | {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, 207 | {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, 208 | {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, 209 | {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, 210 | {file = 
"coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, 211 | {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, 212 | {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, 213 | {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, 214 | {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, 215 | {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, 216 | {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, 217 | {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, 218 | {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, 219 | {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, 220 | {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, 221 | {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, 222 | {file = 
"coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, 223 | {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, 224 | {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, 225 | {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, 226 | {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, 227 | {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, 228 | {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, 229 | {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, 230 | {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, 231 | {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, 232 | {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, 233 | {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, 234 | {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, 235 | {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, 236 | {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, 237 | {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, 238 | {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, 239 | {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, 240 | {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, 241 | ] 242 | 243 | [[package]] 244 | name = "distlib" 245 | version = "0.3.7" 246 | summary = "Distribution utilities" 247 | files = [ 248 | {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, 249 | {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, 250 | ] 251 | 252 | [[package]] 253 | name = "dnspython" 254 | version = "2.4.0" 255 | requires_python = ">=3.8,<4.0" 256 | summary = "DNS toolkit" 257 | dependencies = [ 258 | "httpcore>=0.17.3; python_version >= \"3.8\"", 259 | "sniffio<2.0,>=1.1", 260 | ] 261 | files = [ 262 | {file = "dnspython-2.4.0-py3-none-any.whl", hash = "sha256:46b4052a55b56beea3a3bdd7b30295c292bd6827dd442348bc116f2d35b17f0a"}, 263 | {file = "dnspython-2.4.0.tar.gz", hash = "sha256:758e691dbb454d5ccf4e1b154a19e52847f79e21a42fef17b969144af29a4e6c"}, 264 | ] 265 
| 266 | [[package]] 267 | name = "exceptiongroup" 268 | version = "1.1.2" 269 | requires_python = ">=3.7" 270 | summary = "Backport of PEP 654 (exception groups)" 271 | files = [ 272 | {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, 273 | {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, 274 | ] 275 | 276 | [[package]] 277 | name = "faker" 278 | version = "24.8.0" 279 | requires_python = ">=3.8" 280 | summary = "Faker is a Python package that generates fake data for you." 281 | dependencies = [ 282 | "python-dateutil>=2.4", 283 | ] 284 | files = [ 285 | {file = "Faker-24.8.0-py3-none-any.whl", hash = "sha256:2f70a7817b4147d67c544192e169c5653060fce8aef758db0ea8823d89caac94"}, 286 | {file = "Faker-24.8.0.tar.gz", hash = "sha256:1a46466b22c6bf5925448f725f90c6e0d8bf085819906520ddaa15aec58a6df5"}, 287 | ] 288 | 289 | [[package]] 290 | name = "filelock" 291 | version = "3.12.2" 292 | requires_python = ">=3.7" 293 | summary = "A platform independent file lock." 
294 | files = [ 295 | {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, 296 | {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, 297 | ] 298 | 299 | [[package]] 300 | name = "h11" 301 | version = "0.14.0" 302 | requires_python = ">=3.7" 303 | summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" 304 | files = [ 305 | {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, 306 | {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, 307 | ] 308 | 309 | [[package]] 310 | name = "httpcore" 311 | version = "0.17.3" 312 | requires_python = ">=3.7" 313 | summary = "A minimal low-level HTTP client." 314 | dependencies = [ 315 | "anyio<5.0,>=3.0", 316 | "certifi", 317 | "h11<0.15,>=0.13", 318 | "sniffio==1.*", 319 | ] 320 | files = [ 321 | {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, 322 | {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, 323 | ] 324 | 325 | [[package]] 326 | name = "hypothesis" 327 | version = "6.100.1" 328 | requires_python = ">=3.8" 329 | summary = "A library for property-based testing" 330 | dependencies = [ 331 | "attrs>=22.2.0", 332 | "exceptiongroup>=1.0.0; python_version < \"3.11\"", 333 | "sortedcontainers<3.0.0,>=2.1.0", 334 | ] 335 | files = [ 336 | {file = "hypothesis-6.100.1-py3-none-any.whl", hash = "sha256:3dacf6ec90e8d14aaee02cde081ac9a17d5b70105e45e6ac822db72052c0195b"}, 337 | {file = "hypothesis-6.100.1.tar.gz", hash = "sha256:ebff09d7fa4f1fb6a855a812baf17e578b4481b7b70ec6d96496210d1a4c6c35"}, 338 | ] 339 | 340 | [[package]] 341 | name = "identify" 342 | version = "2.5.26" 343 | requires_python = ">=3.8" 344 | 
summary = "File identification library for Python" 345 | files = [ 346 | {file = "identify-2.5.26-py2.py3-none-any.whl", hash = "sha256:c22a8ead0d4ca11f1edd6c9418c3220669b3b7533ada0a0ffa6cc0ef85cf9b54"}, 347 | {file = "identify-2.5.26.tar.gz", hash = "sha256:7243800bce2f58404ed41b7c002e53d4d22bcf3ae1b7900c2d7aefd95394bf7f"}, 348 | ] 349 | 350 | [[package]] 351 | name = "idna" 352 | version = "3.4" 353 | requires_python = ">=3.5" 354 | summary = "Internationalized Domain Names in Applications (IDNA)" 355 | files = [ 356 | {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, 357 | {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, 358 | ] 359 | 360 | [[package]] 361 | name = "iniconfig" 362 | version = "2.0.0" 363 | requires_python = ">=3.7" 364 | summary = "brain-dead simple config-ini parsing" 365 | files = [ 366 | {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, 367 | {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, 368 | ] 369 | 370 | [[package]] 371 | name = "motor" 372 | version = "3.4.0" 373 | requires_python = ">=3.7" 374 | summary = "Non-blocking MongoDB driver for Tornado or asyncio" 375 | dependencies = [ 376 | "pymongo<5,>=4.5", 377 | ] 378 | files = [ 379 | {file = "motor-3.4.0-py3-none-any.whl", hash = "sha256:4b1e1a0cc5116ff73be2c080a72da078f2bb719b53bc7a6bb9e9a2f7dcd421ed"}, 380 | {file = "motor-3.4.0.tar.gz", hash = "sha256:c89b4e4eb2e711345e91c7c9b122cb68cce0e5e869ed0387dd0acb10775e3131"}, 381 | ] 382 | 383 | [[package]] 384 | name = "motor-types" 385 | version = "1.0.0b4" 386 | requires_python = ">=3.8.0,<4.0" 387 | summary = "Python stubs for Motor, a Non-Blocking MongoDB driver for Python's Tornado and AsyncIO based applications." 
388 | dependencies = [ 389 | "pymongo>=4.3.0", 390 | "typing-extensions>=4.0.0", 391 | ] 392 | files = [ 393 | {file = "motor_types-1.0.0b4-py3-none-any.whl", hash = "sha256:6dc37c87a78acdbd658abbc4426ba3d239c4a26485a12325c65439cb7a2e27bf"}, 394 | {file = "motor_types-1.0.0b4.tar.gz", hash = "sha256:ece2ec5283dfef1a4d3d693db15a010b6c56fea0d46697184e36880935a9f940"}, 395 | ] 396 | 397 | [[package]] 398 | name = "nodeenv" 399 | version = "1.8.0" 400 | requires_python = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" 401 | summary = "Node.js virtual environment builder" 402 | dependencies = [ 403 | "setuptools", 404 | ] 405 | files = [ 406 | {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, 407 | {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, 408 | ] 409 | 410 | [[package]] 411 | name = "packaging" 412 | version = "23.1" 413 | requires_python = ">=3.7" 414 | summary = "Core utilities for Python packages" 415 | files = [ 416 | {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, 417 | {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, 418 | ] 419 | 420 | [[package]] 421 | name = "platformdirs" 422 | version = "3.9.1" 423 | requires_python = ">=3.7" 424 | summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
425 | files = [ 426 | {file = "platformdirs-3.9.1-py3-none-any.whl", hash = "sha256:ad8291ae0ae5072f66c16945166cb11c63394c7a3ad1b1bc9828ca3162da8c2f"}, 427 | {file = "platformdirs-3.9.1.tar.gz", hash = "sha256:1b42b450ad933e981d56e59f1b97495428c9bd60698baab9f3eb3d00d5822421"}, 428 | ] 429 | 430 | [[package]] 431 | name = "pluggy" 432 | version = "1.4.0" 433 | requires_python = ">=3.8" 434 | summary = "plugin and hook calling mechanisms for python" 435 | files = [ 436 | {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, 437 | {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, 438 | ] 439 | 440 | [[package]] 441 | name = "pre-commit" 442 | version = "3.7.0" 443 | requires_python = ">=3.9" 444 | summary = "A framework for managing and maintaining multi-language pre-commit hooks." 445 | dependencies = [ 446 | "cfgv>=2.0.0", 447 | "identify>=1.0.0", 448 | "nodeenv>=0.11.1", 449 | "pyyaml>=5.1", 450 | "virtualenv>=20.10.0", 451 | ] 452 | files = [ 453 | {file = "pre_commit-3.7.0-py2.py3-none-any.whl", hash = "sha256:5eae9e10c2b5ac51577c3452ec0a490455c45a0533f7960f993a0d01e59decab"}, 454 | {file = "pre_commit-3.7.0.tar.gz", hash = "sha256:e209d61b8acdcf742404408531f0c37d49d2c734fd7cff2d6076083d191cb060"}, 455 | ] 456 | 457 | [[package]] 458 | name = "pydantic" 459 | version = "2.7.0" 460 | requires_python = ">=3.8" 461 | summary = "Data validation using Python type hints" 462 | dependencies = [ 463 | "annotated-types>=0.4.0", 464 | "pydantic-core==2.18.1", 465 | "typing-extensions>=4.6.1", 466 | ] 467 | files = [ 468 | {file = "pydantic-2.7.0-py3-none-any.whl", hash = "sha256:9dee74a271705f14f9a1567671d144a851c675b072736f0a7b2608fd9e495352"}, 469 | {file = "pydantic-2.7.0.tar.gz", hash = "sha256:b5ecdd42262ca2462e2624793551e80911a1e989f462910bb81aef974b4bb383"}, 470 | ] 471 | 472 | [[package]] 473 | name = "pydantic-core" 474 | version = 
"2.18.1" 475 | requires_python = ">=3.8" 476 | summary = "Core functionality for Pydantic validation and serialization" 477 | dependencies = [ 478 | "typing-extensions!=4.7.0,>=4.6.0", 479 | ] 480 | files = [ 481 | {file = "pydantic_core-2.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ee9cf33e7fe14243f5ca6977658eb7d1042caaa66847daacbd2117adb258b226"}, 482 | {file = "pydantic_core-2.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b7bbb97d82659ac8b37450c60ff2e9f97e4eb0f8a8a3645a5568b9334b08b50"}, 483 | {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df4249b579e75094f7e9bb4bd28231acf55e308bf686b952f43100a5a0be394c"}, 484 | {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d0491006a6ad20507aec2be72e7831a42efc93193d2402018007ff827dc62926"}, 485 | {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ae80f72bb7a3e397ab37b53a2b49c62cc5496412e71bc4f1277620a7ce3f52b"}, 486 | {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58aca931bef83217fca7a390e0486ae327c4af9c3e941adb75f8772f8eeb03a1"}, 487 | {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1be91ad664fc9245404a789d60cba1e91c26b1454ba136d2a1bf0c2ac0c0505a"}, 488 | {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:667880321e916a8920ef49f5d50e7983792cf59f3b6079f3c9dac2b88a311d17"}, 489 | {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f7054fdc556f5421f01e39cbb767d5ec5c1139ea98c3e5b350e02e62201740c7"}, 490 | {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:030e4f9516f9947f38179249778709a460a3adb516bf39b5eb9066fcfe43d0e6"}, 491 | {file = "pydantic_core-2.18.1-cp310-none-win32.whl", hash = 
"sha256:2e91711e36e229978d92642bfc3546333a9127ecebb3f2761372e096395fc649"}, 492 | {file = "pydantic_core-2.18.1-cp310-none-win_amd64.whl", hash = "sha256:9a29726f91c6cb390b3c2338f0df5cd3e216ad7a938762d11c994bb37552edb0"}, 493 | {file = "pydantic_core-2.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9ece8a49696669d483d206b4474c367852c44815fca23ac4e48b72b339807f80"}, 494 | {file = "pydantic_core-2.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a5d83efc109ceddb99abd2c1316298ced2adb4570410defe766851a804fcd5b"}, 495 | {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7973c381283783cd1043a8c8f61ea5ce7a3a58b0369f0ee0ee975eaf2f2a1b"}, 496 | {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54c7375c62190a7845091f521add19b0f026bcf6ae674bdb89f296972272e86d"}, 497 | {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd63cec4e26e790b70544ae5cc48d11b515b09e05fdd5eff12e3195f54b8a586"}, 498 | {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:561cf62c8a3498406495cfc49eee086ed2bb186d08bcc65812b75fda42c38294"}, 499 | {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68717c38a68e37af87c4da20e08f3e27d7e4212e99e96c3d875fbf3f4812abfc"}, 500 | {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d5728e93d28a3c63ee513d9ffbac9c5989de8c76e049dbcb5bfe4b923a9739d"}, 501 | {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0f17814c505f07806e22b28856c59ac80cee7dd0fbb152aed273e116378f519"}, 502 | {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d816f44a51ba5175394bc6c7879ca0bd2be560b2c9e9f3411ef3a4cbe644c2e9"}, 503 | {file = "pydantic_core-2.18.1-cp311-none-win32.whl", hash = 
"sha256:09f03dfc0ef8c22622eaa8608caa4a1e189cfb83ce847045eca34f690895eccb"}, 504 | {file = "pydantic_core-2.18.1-cp311-none-win_amd64.whl", hash = "sha256:27f1009dc292f3b7ca77feb3571c537276b9aad5dd4efb471ac88a8bd09024e9"}, 505 | {file = "pydantic_core-2.18.1-cp311-none-win_arm64.whl", hash = "sha256:48dd883db92e92519201f2b01cafa881e5f7125666141a49ffba8b9facc072b0"}, 506 | {file = "pydantic_core-2.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b6b0e4912030c6f28bcb72b9ebe4989d6dc2eebcd2a9cdc35fefc38052dd4fe8"}, 507 | {file = "pydantic_core-2.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3202a429fe825b699c57892d4371c74cc3456d8d71b7f35d6028c96dfecad31"}, 508 | {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3982b0a32d0a88b3907e4b0dc36809fda477f0757c59a505d4e9b455f384b8b"}, 509 | {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25595ac311f20e5324d1941909b0d12933f1fd2171075fcff763e90f43e92a0d"}, 510 | {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14fe73881cf8e4cbdaded8ca0aa671635b597e42447fec7060d0868b52d074e6"}, 511 | {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca976884ce34070799e4dfc6fbd68cb1d181db1eefe4a3a94798ddfb34b8867f"}, 512 | {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684d840d2c9ec5de9cb397fcb3f36d5ebb6fa0d94734f9886032dd796c1ead06"}, 513 | {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:54764c083bbe0264f0f746cefcded6cb08fbbaaf1ad1d78fb8a4c30cff999a90"}, 514 | {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:201713f2f462e5c015b343e86e68bd8a530a4f76609b33d8f0ec65d2b921712a"}, 515 | {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:fd1a9edb9dd9d79fbeac1ea1f9a8dd527a6113b18d2e9bcc0d541d308dae639b"}, 516 | {file = "pydantic_core-2.18.1-cp312-none-win32.whl", hash = "sha256:d5e6b7155b8197b329dc787356cfd2684c9d6a6b1a197f6bbf45f5555a98d411"}, 517 | {file = "pydantic_core-2.18.1-cp312-none-win_amd64.whl", hash = "sha256:9376d83d686ec62e8b19c0ac3bf8d28d8a5981d0df290196fb6ef24d8a26f0d6"}, 518 | {file = "pydantic_core-2.18.1-cp312-none-win_arm64.whl", hash = "sha256:c562b49c96906b4029b5685075fe1ebd3b5cc2601dfa0b9e16c2c09d6cbce048"}, 519 | {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e178e5b66a06ec5bf51668ec0d4ac8cfb2bdcb553b2c207d58148340efd00143"}, 520 | {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:72722ce529a76a4637a60be18bd789d8fb871e84472490ed7ddff62d5fed620d"}, 521 | {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fe0c1ce5b129455e43f941f7a46f61f3d3861e571f2905d55cdbb8b5c6f5e2c"}, 522 | {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4284c621f06a72ce2cb55f74ea3150113d926a6eb78ab38340c08f770eb9b4d"}, 523 | {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a0c3e718f4e064efde68092d9d974e39572c14e56726ecfaeebbe6544521f47"}, 524 | {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2027493cc44c23b598cfaf200936110433d9caa84e2c6cf487a83999638a96ac"}, 525 | {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:76909849d1a6bffa5a07742294f3fa1d357dc917cb1fe7b470afbc3a7579d539"}, 526 | {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ee7ccc7fb7e921d767f853b47814c3048c7de536663e82fbc37f5eb0d532224b"}, 527 | {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:ee2794111c188548a4547eccc73a6a8527fe2af6cf25e1a4ebda2fd01cdd2e60"}, 528 | {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a139fe9f298dc097349fb4f28c8b81cc7a202dbfba66af0e14be5cfca4ef7ce5"}, 529 | {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d074b07a10c391fc5bbdcb37b2f16f20fcd9e51e10d01652ab298c0d07908ee2"}, 530 | {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c69567ddbac186e8c0aadc1f324a60a564cfe25e43ef2ce81bcc4b8c3abffbae"}, 531 | {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:baf1c7b78cddb5af00971ad5294a4583188bda1495b13760d9f03c9483bb6203"}, 532 | {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2684a94fdfd1b146ff10689c6e4e815f6a01141781c493b97342cdc5b06f4d5d"}, 533 | {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:73c1bc8a86a5c9e8721a088df234265317692d0b5cd9e86e975ce3bc3db62a59"}, 534 | {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e60defc3c15defb70bb38dd605ff7e0fae5f6c9c7cbfe0ad7868582cb7e844a6"}, 535 | {file = "pydantic_core-2.18.1.tar.gz", hash = "sha256:de9d3e8717560eb05e28739d1b35e4eac2e458553a52a301e51352a7ffc86a35"}, 536 | ] 537 | 538 | [[package]] 539 | name = "pymongo" 540 | version = "4.6.1" 541 | requires_python = ">=3.7" 542 | summary = "Python driver for MongoDB