├── tests
├── __init__.py
├── test_Path.py
├── dummy_data.py
├── test_scenarios.py
├── test_PDMultiHandle.py
└── test_PDHandle.py
├── poetry.toml
├── assets
├── logo.png
├── logo.afdesign
└── coverage.svg
├── PathDict.code-workspace
├── cli.sh
├── LICENSE
├── path_dict
├── __init__.py
├── path.py
├── utils.py
├── multi_path_dict.py
└── path_dict.py
├── profiler.py
├── pyproject.toml
├── tests_syntax.py
├── .gitignore
├── .github
└── workflows
│ └── test.yml
├── test_docs_examples.py
├── README.md
└── poetry.lock
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/poetry.toml:
--------------------------------------------------------------------------------
1 | [virtualenvs]
2 | in-project = true
3 | create = true
4 |
--------------------------------------------------------------------------------
/assets/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mkrd/PathDict/HEAD/assets/logo.png
--------------------------------------------------------------------------------
/assets/logo.afdesign:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mkrd/PathDict/HEAD/assets/logo.afdesign
--------------------------------------------------------------------------------
/PathDict.code-workspace:
--------------------------------------------------------------------------------
1 | {
2 | "folders": [
3 | {
4 | "path": "."
5 | }
6 | ],
7 | "settings": {
8 | "[python]": {
9 | "editor.formatOnSave": true,
10 | "editor.defaultFormatter": "charliermarsh.ruff"
11 | },
12 | "editor.codeActionsOnSave": {
13 | "source.organizeImports": true
14 | },
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/tests/test_Path.py:
--------------------------------------------------------------------------------
1 | from path_dict.path import Path
2 |
3 |
def test_Path():
	"""Basic Path behavior: key list, length, wildcard detection, trivial expand."""
	friends_path = Path("users", "1", "friends")
	assert friends_path.path == ["users", "1", "friends"]
	assert len(friends_path) == 3
	assert Path("test", "*").has_wildcards
	# A Path without wildcards expands to a list containing only itself
	assert friends_path.expand({}) == [friends_path]
--------------------------------------------------------------------------------
/cli.sh:
--------------------------------------------------------------------------------
#!/bin/sh
# Task runner. Usage: ./cli.sh [ -t | --test ] [ -p | --profiler ]
while [ $# -gt 0 ]; do case $1 in


	--test|-t)
		poetry run pytest --cov=path_dict --cov-report term-missing
		rm ./.coverage
		rm -r ./.pytest_cache
		shift ;;


	--profiler|-p)
		poetry run python profiler.py
		shift ;;


	# Catch-all for unknown arguments. The previous pattern `*|-*|--*)` listed
	# the bare `*` first, which matches everything and made the option
	# alternatives dead code; it also had an unreachable `exit 1` after `exit 2`.
	*)
		echo "Unknown option $1"
		echo "Usage: [ -t | --test ] [ -p | --profiler ]"
		exit 2 ;;


esac; done
25 |
--------------------------------------------------------------------------------
/tests/dummy_data.py:
--------------------------------------------------------------------------------
def get_users():
	"""Return a fresh sample dataset with user records, premium ids and follow pairs."""
	user_records = {
		"1": {"name": "Joe", "age": 22},
		"2": {"name": "Ben", "age": 49},
		"3": {"name": "Sue", "age": 32},
	}
	follow_pairs = [
		["Ben", "Sue"],
		["Joe", "Ben"],
		["Ben", "Joe"],
	]
	return {
		"total_users": 3,
		"premium_users": [1, 3],
		"users": user_records,
		"follows": follow_pairs,
	}
25 |
26 |
def get_db():
	"""Return a fresh sample database: meta info plus three mutually-befriended users."""
	people = [("John", 20), ("Jane", 21), ("Jack", 22)]
	ids = [str(i + 1) for i in range(len(people))]
	users = {}
	for uid, (name, age) in zip(ids, people):
		users[uid] = {
			"name": name,
			"age": age,
			# Everyone is friends with everyone else
			"friends": [other for other in ids if other != uid],
		}
	return {
		"meta": {"version": 1},
		"users": users,
	}
50 |
--------------------------------------------------------------------------------
/assets/coverage.svg:
--------------------------------------------------------------------------------
1 |
2 |
22 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 mkrd
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/path_dict/__init__.py:
--------------------------------------------------------------------------------
1 | from .path_dict import PathDict
2 |
3 | pd = PathDict
4 |
5 | ################################################################################
6 | # To be used like this:
7 | # pd(db).at("u1", "meta", "last_login").get()
8 | # pd(db).at("*", "meta").get()
9 | # pd(db).at("u10", "meta", "last_login").set(1234567890)
10 | # pd(db).at("u10", "meta", "last_login").delete()
11 | # pd(db).at("u10", "meta", "last_login").exists()
12 |
13 | # We need some kind of FloatingHandle that is returned by the PathDict.at() method
14 | # This handle will be used to get, set, delete, etc. the value at the path.
15 | # If the path in at() is a wildcard, then a FloatingWildcardHandle will be returned instead
16 | # This handle will be used to get, set, delete, etc. the values at the paths that match the wildcard
17 |
18 | # The workflow is always .at().do_something()
19 | # Except for pd(x).do_something(), then at() is implied to just select x
20 |
21 | # So we must just use the same Handle for all situations.
22 | # The Handle then needs to be able to handle all situations.
23 | # For the .do_something() after .at(), we need to tell the do_something() method
24 | # what the path is and whether or not it is a wildcard path.
25 |
26 | # If .at(path) has wildcards, then we really need a MultiPathDict
27 | # After the .do_something(), a regular PathDict will be returned, at its previous path
28 |
--------------------------------------------------------------------------------
/profiler.py:
--------------------------------------------------------------------------------
1 | from path_dict import pd
2 | import json
3 | from pyinstrument.profiler import Profiler
4 |
5 | from path_dict.path_dict import PathDict
6 |
7 | db_directory = "./test_data/production_database"
8 |
9 |
class TestObject:
	"""Minimal object wrapper holding arbitrary data, used for profiling runs."""

	def __init__(self, data):
		# Store the payload by reference, no copying
		self.data = data

	def get_path(self, path):
		# Only prints the requested path; returns None
		print(path)
16 |
17 |
18 | users = pd(json.loads(open(f"{db_directory}/users.json", "r").read()))
19 | tasks = pd(json.loads(open(f"{db_directory}/tasks.json", "r").read()))
20 |
21 |
22 | users.filter(f=lambda k, v: v.get("status") != "archived")
23 | sorted_users_list = sorted(users.values(), key=lambda x: x["first_name"])
24 |
25 | print(f"{len(users)} users, {len(tasks)} tasks")
26 | tasks["test", "test"] = TestObject({"test": "test"})
27 |
28 |
def agg(tasks: PathDict, sorted_users_list):
	"""
	Annotate each user dict with counts of their accepted and pending tasks.

	:param tasks: PathDict of tasks; each task value may carry "annotator_id"
		and "status" keys.
	:param sorted_users_list: List of user dicts; each is mutated in place with
		"active_tasks_sum" and "pending_tasks_sum" keys, and a progress line is
		printed per user.
	"""
	# Get active users
	for user in sorted_users_list:
		# Tasks assigned to this user and already accepted
		user_active_tasks = tasks.filtered(lambda k, v: v.get("annotator_id") == user["id"] and v["status"] == "assigned_accepted")
		s = len(user_active_tasks)
		user["active_tasks_sum"] = s
		# Tasks assigned to this user but still pending
		user_pending_tasks = tasks.filtered(lambda k, v: v.get("annotator_id") == user["id"] and v["status"] == "assigned_pending")
		s = len(user_pending_tasks)
		user["pending_tasks_sum"] = s
		print(user["last_name"], user["active_tasks_sum"], user["pending_tasks_sum"])
39 |
40 |
41 | profiler = Profiler(interval=0.0001)
42 | profiler.start()
43 | agg(tasks, sorted_users_list)
44 | profiler.stop()
45 | profiler.open_in_browser(timeline=True)
46 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "path_dict"
3 | version = "4.0.0"
4 | repository = "https://github.com/mkrd/PathDict"
5 | description = "Extends Python's dict with useful extras"
6 | authors = ["Marcel Kröker "]
7 | license = "MIT"
8 | readme = "README.md"
9 | classifiers=[
10 | "Programming Language :: Python :: 3",
11 | "License :: OSI Approved :: MIT License",
12 | "Operating System :: OS Independent",
13 | "Intended Audience :: Developers",
14 | "Programming Language :: Python",
15 | "Topic :: Software Development :: Libraries :: Python Modules"
16 | ]
17 |
18 | [tool.poetry.dependencies]
19 | python = "^3.8"
20 |
21 | [tool.poetry.group.dev.dependencies]
22 | pycodestyle = "^2.10.0"
23 | black = "^23.1.0"
24 | pytest-cov = "^4.0.0"
25 | pyinstrument = "^4.3.0"
26 |
27 | [build-system]
28 | requires = ["poetry-core"]
29 | build-backend = "poetry.core.masonry.api"
30 |
31 | [tool.ruff]
32 | show-fixes = true
33 | line-length = 120
34 | select = [
35 | "ANN", # annotations
36 | "B", # bugbear
37 | "C", # comprehensions
38 | "E", # style errors
39 | "F", # flakes
40 | "I", # import sorting
41 | "M", # meta
42 | "N", # naming
43 | "U", # upgrade
44 | "W", # style warnings
45 | "YTT", # sys.version
46 | ]
47 | ignore = [
48 | "ANN101", # missing type annotation for self
49 | "ANN202", # Missing return type annotation for private function
50 | "E501", # line length
51 | "UP006", # use Dict (not possible in python 3.8)
52 | "UP007", # use X | Y for union (not possible in python 3.8)
53 | "UP035", # use typing Dict and List (not possible in python 3.8)
54 | "W191", # indentation contains tabs
55 | ]
56 |
57 | [tool.ruff.format]
58 | indent-style = "tab"
59 | quote-style = "double"
60 |
--------------------------------------------------------------------------------
/tests_syntax.py:
--------------------------------------------------------------------------------
1 | from path_dict import PathDict as pd
2 |
3 | users = {
4 | "user_1": {
5 | "name": "John Smith",
6 | "email": "john.smith@example.com",
7 | "age": 32,
8 | "address": {
9 | "street": "123 Main St",
10 | "city": "Anytown",
11 | "state": "CA",
12 | "zip": "12345"
13 | },
14 | "interests": ["reading", "hiking", "traveling"]
15 | },
16 | "user_2": {
17 | "name": "Jane Doe",
18 | "email": "jane.doe@example.com",
19 | "age": 28,
20 | "address": {
21 | "street": "456 Oak Ave",
22 | "city": "Somewhere",
23 | "state": "NY",
24 | "zip": "67890"
25 | },
26 | "interests": ["cooking", "running", "music"],
27 | "job": {
28 | "title": "Software Engineer",
29 | "company": "Example Inc.",
30 | "salary": 80000
31 | }
32 | },
33 | "user_3": {
34 | "name": "Bob Johnson",
35 | "email": "bob.johnson@example.com",
36 | "age": 40,
37 | "address": {
38 | "street": "789 Maple Blvd",
39 | "city": "Nowhere",
40 | "state": "TX",
41 | "zip": "54321"
42 | },
43 | "interests": ["gardening", "fishing", "crafts"],
44 | "job": {
45 | "title": "Marketing Manager",
46 | "company": "Acme Corporation",
47 | "salary": 90000
48 | }
49 | },
50 | "user_4": {
51 | "name": "Alice Brown",
52 | "email": "alice.brown@example.com",
53 | "age": 25,
54 | "address": {
55 | "street": "321 Pine St",
56 | "city": "Anywhere",
57 | "state": "FL",
58 | "zip": "13579"
59 | },
60 | "interests": ["painting", "yoga", "volunteering"],
61 | "job": {
62 | "title": "Graphic Designer",
63 | "company": "Design Co.",
64 | "salary": 65000
65 | }
66 | }
67 | }
68 |
69 | users_pd = pd(users)
70 |
71 |
72 | users_pd.at("user_4")
73 |
74 |
75 |
76 | print(users_pd.at("user_5").set({"name": "John Smither", "age": 33}))
77 |
78 | # print(users_pd.at("*", "age").gather(as_type="list", include_paths=True))
79 |
--------------------------------------------------------------------------------
/path_dict/path.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from .utils import get_nested_keys_or_indices
4 |
5 |
class Path:
	path: list[str]
	raw: bool

	def __init__(self, *path, raw=False):
		# Careful: if the kwargs are passed as positional args, they become part of the path
		self.raw = raw

		# path always arrives as a tuple; a single list argument is taken
		# to already be the list of keys
		if len(path) == 1 and isinstance(path[0], list):
			keys = path[0]
		else:
			keys = list(path)

		# Drop empty-string segments
		self.path = [key for key in keys if key != ""]

	def __repr__(self) -> str:
		return f"Path(path={self.path}, raw={self.raw})"

	@property
	def has_wildcards(self):
		# True if any segment is the wildcard marker "*"
		return any(key == "*" for key in self.path)

	def __iter__(self):
		"""Iterate over path keys using a for in loop"""
		return iter(self.path)

	def __len__(self):
		return len(self.path)

	def __getitem__(self, key):
		return self.path[key]

	def copy(self, replace_path=None, replace_raw=None) -> Path:
		"""Return a new Path, optionally overriding the key list or the raw flag."""
		new_path = replace_path if replace_path is not None else list(self.path)
		new_raw = replace_raw if replace_raw is not None else self.raw
		return Path(new_path, raw=new_raw)

	def expand(self, ref: dict | list) -> list[Path]:
		"""
		Expand the path to list[Path], using the ref as a reference.
		"""
		if not self.has_wildcards:
			return [self]

		prefixes = [[]]
		for key in self.path:
			if key == "*":
				# Replace "*" with every key or index present at each prefix in ref
				prefixes = [prefix + [k] for prefix in prefixes for k in get_nested_keys_or_indices(ref, prefix)]
			else:
				# Literal key: extend every candidate prefix
				prefixes = [prefix + [key] for prefix in prefixes]

		# No matches at all -> empty result
		if prefixes == [[]]:
			return []
		return [Path(p, raw=self.raw) for p in prefixes]
67 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Test data
2 | test_data/
3 |
4 | # Byte-compiled / optimized / DLL files
5 | __pycache__/
6 | *.py[cod]
7 | *$py.class
8 |
9 | # C extensions
10 | *.so
11 |
12 | # Distribution / packaging
13 | .Python
14 | build/
15 | develop-eggs/
16 | dist/
17 | downloads/
18 | eggs/
19 | .eggs/
20 | lib/
21 | lib64/
22 | parts/
23 | sdist/
24 | var/
25 | wheels/
26 | pip-wheel-metadata/
27 | share/python-wheels/
28 | *.egg-info/
29 | .installed.cfg
30 | *.egg
31 | MANIFEST
32 |
33 | # PyInstaller
34 | # Usually these files are written by a python script from a template
35 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
36 | *.manifest
37 | *.spec
38 |
39 | # Installer logs
40 | pip-log.txt
41 | pip-delete-this-directory.txt
42 |
43 | # Unit test / coverage reports
44 | htmlcov/
45 | .tox/
46 | .nox/
47 | .coverage
48 | .coverage.*
49 | .cache
50 | nosetests.xml
51 | coverage.xml
52 | *.cover
53 | *.py,cover
54 | .hypothesis/
55 | .pytest_cache/
56 |
57 | # Translations
58 | *.mo
59 | *.pot
60 |
61 | # Django stuff:
62 | *.log
63 | local_settings.py
64 | db.sqlite3
65 | db.sqlite3-journal
66 |
67 | # Flask stuff:
68 | instance/
69 | .webassets-cache
70 |
71 | # Scrapy stuff:
72 | .scrapy
73 |
74 | # Sphinx documentation
75 | docs/_build/
76 |
77 | # PyBuilder
78 | target/
79 |
80 | # Jupyter Notebook
81 | .ipynb_checkpoints
82 |
83 | # IPython
84 | profile_default/
85 | ipython_config.py
86 |
87 | # pyenv
88 | .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
98 | __pypackages__/
99 |
100 | # Celery stuff
101 | celerybeat-schedule
102 | celerybeat.pid
103 |
104 | # SageMath parsed files
105 | *.sage.py
106 |
107 | # Environments
108 | .env
109 | .venv
110 | env/
111 | venv/
112 | ENV/
113 | env.bak/
114 | venv.bak/
115 |
116 | # Spyder project settings
117 | .spyderproject
118 | .spyproject
119 |
120 | # Rope project settings
121 | .ropeproject
122 |
123 | # mkdocs documentation
124 | /site
125 |
126 | # mypy
127 | .mypy_cache/
128 | .dmypy.json
129 | dmypy.json
130 |
131 | # Pyre type checker
132 | .pyre/
133 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Tests
2 | on: [push, pull_request]
3 | jobs:
4 |
5 | # JOB
6 | tests-job:
7 | runs-on: ubuntu-latest
8 | strategy:
9 | matrix:
10 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
11 | steps:
12 | #----------------------------------------------
13 | #---- Checkout and install poetry and python
14 | #----------------------------------------------
15 |
16 | - uses: actions/checkout@v3
17 | - name: Install poetry
18 | run: pipx install poetry
19 | - uses: actions/setup-python@v4
20 | with:
21 | python-version: ${{ matrix.python-version }}
22 | cache: 'poetry'
23 |
24 | #----------------------------------------------
25 | #---- install dependencies
26 | #----------------------------------------------
27 |
28 | - name: poetry install
29 | run: poetry install
30 |
31 | #----------------------------------------------
32 | #---- Tests
33 | #----------------------------------------------
34 |
35 | - name: 🚀 Run tests with code coverage report
36 | run: poetry run pytest --cov=path_dict --cov-report term-missing
37 |
38 | #----------------------------------------------
39 | #---- Save coverage artifact
40 | #----------------------------------------------
41 | - uses: actions/upload-artifact@v3
42 | with:
43 | name: coverage
44 | path: .coverage
45 |
46 |
47 |
48 | # JOB
49 | cov-badge-job:
50 | needs: tests-job
51 | runs-on: ubuntu-latest
52 | steps:
53 | - uses: actions/checkout@v3
54 | #----------------------------------------------
55 | #---- Coverage badge
56 | #----------------------------------------------
57 | - name: ls-lah
58 | run: ls -lah
59 |
60 | - uses: actions/download-artifact@v3
61 | with:
62 | name: coverage
63 |
64 | - name: Generate Coverage Badge
65 | uses: tj-actions/coverage-badge-py@v1.8
66 | with:
67 | output: assets/coverage.svg
68 |
69 | - name: Verify Changed files
70 | uses: tj-actions/verify-changed-files@v11.1
71 | id: changed_files
72 | with:
73 | files: assets/coverage.svg
74 |
75 | - name: Commit files
76 | if: steps.changed_files.outputs.files_changed == 'true'
77 | run: |
78 | git config --local user.email "github-actions[bot]@users.noreply.github.com"
79 | git config --local user.name "github-actions[bot]"
80 | git add assets/coverage.svg
81 | git commit -m "Updated assets/coverage.svg"
82 |
83 | - name: Push changes
84 | if: steps.changed_files.outputs.files_changed == 'true'
85 | uses: ad-m/github-push-action@master
86 | with:
87 | github_token: ${{ secrets.github_token }}
88 | branch: ${{ github.ref }}
89 |
--------------------------------------------------------------------------------
/path_dict/utils.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from typing import Any
4 |
5 |
def fast_deepcopy(obj):
	"""
	Makes a fast deep copy of the object.
	dict, list, tuple, set, str, int, float and bool are truly copied.
	Everything else is just copied by reference.

	:param obj: The object to be copied
	"""
	# Immutable scalars need no copying
	if isinstance(obj, (int, str, bool, float)):
		return obj
	# Dicts: recursively copy both keys and values
	if isinstance(obj, dict):
		copied = {}
		for key, value in obj.items():
			copied[fast_deepcopy(key)] = fast_deepcopy(value)
		return copied
	# Other supported containers: rebuild with recursively copied elements
	obj_type = type(obj)
	if obj_type in (list, tuple, set):
		return obj_type(fast_deepcopy(item) for item in obj)
	# Anything else is shared by reference
	return obj
25 |
26 |
def safe_list_get(current, key):
	"""
	Return current[int(key)], converting any invalid key into a KeyError.

	:param current: The list to index into.
	:param key: The index; may be an int or an int-like string such as "3".
	:raises KeyError: If the key cannot be converted to an int (ValueError or
		TypeError, e.g. key is None) or is out of range (IndexError). TypeError
		was previously not caught, so e.g. a None key leaked a TypeError instead
		of the KeyError the callers document.
	"""
	try:
		return current[int(key)]
	except (ValueError, IndexError, TypeError) as e:
		raise KeyError(f"PathDict: invalid path ({key} not in {current})") from e
32 |
33 |
def guarded_get(current: dict | list, key: Any):
	"""
	Get the value at the given key. If current is a dict, return None if the key
	does not exist. If current is a list, return a KeyError if a problem occurs.

	:param: current: The current dictionary or list we're looking at
	:param: key: The key to look up
	"""
	if isinstance(current, dict):
		# dict.get already yields None for missing keys
		return current.get(key)
	if isinstance(current, list):
		return safe_list_get(current, key)
	# Neither a dict nor a list: the path cannot be traversed further
	raise KeyError(f"PathDict: The path is not a stack of nested dicts and lists (value at key {key} has type {type(current)})")
50 |
51 |
def guarded_descent(current, key):
	"""
	Like guarded_get, but create new empty dicts on the way down if necessary.
	"""
	if isinstance(current, dict):
		# Missing keys get a fresh empty dict; existing values are untouched
		if key not in current:
			current[key] = {}
		return current[key]
	if isinstance(current, list):
		return safe_list_get(current, key)
	raise KeyError("Can't set the key of a non-dict")
63 |
64 |
def get_nested_keys_or_indices(ref: dict | list, path: list):
	"""
	:param ref: The reference dictionary or list
	:param path: The path to object or list we want to get the keys or indices of

	If the nested object is a dict, return a list of its keys.
	If the nested object is a list, return a list of all list indices.

	If the path does not exist, return [].

	If there is something in path where we cannot continue to traverse the
	tree of dicts and lists (e.g. due to trying to get a string key from a list),
	a KeyError is raised.
	"""

	current = ref
	# Traverse the path of dicts and lists.
	for key in path:
		current = guarded_get(current, key)
		if current is None:
			return []

	# Return keys or indices or raise KeyError
	if isinstance(current, dict):
		return list(current.keys())
	if isinstance(current, list):
		return list(range(len(current)))
	# Bug fix: the old message referenced the loop variable `key`, which is
	# unbound when path is empty, turning the intended KeyError into a NameError.
	# Report the full path instead, which is always defined.
	raise KeyError(
		f"PathDict: The path is not a stack of nested dicts and lists " f"(value at path {path} has type {type(current)})"
	)
94 |
--------------------------------------------------------------------------------
/test_docs_examples.py:
--------------------------------------------------------------------------------
1 | from path_dict import pd
2 | from datetime import datetime, timedelta
3 |
4 |
5 | users = {
6 | "u1": {
7 | "name": "Julia",
8 | "age": 32,
9 | "interests": ["soccer", "basketball"],
10 | },
11 | "u2": {
12 | "name": "Ben",
13 | "age": 26,
14 | "interests": ["airplanes", "alternative music"]
15 | }
16 | }
17 |
18 |
19 | users = pd(users)
20 |
21 | # Get all user names
22 | users["*", "name"] # -> ["Julia", "Ben"]
23 | print(f"{users['*', 'name'] = }")
24 |
25 | # Increase age of Julia
26 | users["u1", "age"] = 33
27 | print(f"{users['u1', 'age'] = }") # -> 33
28 |
29 | # Append interest "cooking" to all users
30 | users["*", "interests"] = lambda i: i + ["cooking"]
31 | print(users)
32 |
33 | # Remove all interests of Ben which do not start with "a" ("cooking is removed")
34 | users.at("u2", "interests").filter(lambda i: not i.startswith("a"))
35 | print(users)
36 |
37 | # Remove users that are younger than 30
38 | users.at().filter(lambda id, user: user["age"] >= 30)
39 | print(users)
40 |
41 |
42 |
43 |
44 | crawler_output = {
45 | "crawl_time": "2021-09-01 12:00:00",
46 | "posts": [
47 | {
48 | "id": 1,
49 | "title": "Hello World",
50 | "meta": {
51 | "deleted": True,
52 | "author": "u1"
53 | }
54 | },
55 | {
56 | "id": 2,
57 | "title": "Hello aswell",
58 | "meta": {
59 | "author": "u2"
60 | }
61 | }
62 | ]
63 | }
64 |
65 | # # Get all deleted Posts:
66 | # deleted_posts = []
67 | # for post in crawler_output["posts"]:
68 | # if "meta" in post:
69 | # if post["meta"].get("deleted", False):
70 | # deleted_posts.append(post)
71 |
72 | # # Or
73 | # deleted_posts = [post for post in crawler_output["posts"] if post.get("meta", {}).get("deleted", False)]
74 |
75 | # # Remove all deleted posts
76 | # db["posts"] = [post for post in crawler_output["posts"] if not post.get("meta", {}).get("deleted", False)]
77 |
78 | # print("Deleted Posts:")
79 | # print(deleted_posts)
80 |
81 | # # PD version get deleted posts
82 | # pd = PathDict(crawler_output)
83 | # deleted_posts = pd.filtered("posts", lambda x: x["meta", "deleted"])["posts"]
84 | # print(deleted_posts)
85 | # # Current
86 | # deleted_posts = crawler_output.filtered("posts", lambda x: x["meta", "deleted"])["posts"]
87 | # # New alternative 1
88 | # deleted_posts = pd(crawler_output).filtered("posts", lambda x: x["meta", "deleted"])["posts"]
89 | # # New alternative 2
90 | # deleted_posts = pd.filtered(crawler_output, "posts", lambda x: x["meta", "deleted"])["posts"]
91 |
92 |
93 |
94 | # # PD version remove deleted posts
95 | # pd.filter("posts", f=lambda x: not x["meta", "deleted"])
96 |
97 |
98 |
99 | posts = {
100 | "p1": {
101 | "ts": 1663566212,
102 | "title": "Hello World",
103 | "meta": {"author": "u1", "deleted": True},
104 | },
105 | "p2": {
106 | "ts": 1666158212,
107 | "title": "Hello aswell",
108 | "meta": {"author": "u2"},
109 | },
110 | "p3": {
111 | "ts": 1666331012,
112 | "title": "Hello again",
113 | },
114 | }
115 |
116 |
117 | last_week = datetime.now() - timedelta(weeks=1)
118 | pd(posts).at("*", "ts").map(lambda ts: datetime.fromtimestamp(ts))
119 | pd(posts).filter(lambda id, post: post["ts"] > last_week)
120 |
--------------------------------------------------------------------------------
/tests/test_scenarios.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from path_dict import pd
4 |
5 |
def test_scenario_1():
	"""End-to-end check of basic PathDict get/set/apply behavior on nested data."""
	d = {
		"total_users": 3,
		"premium_users": [1, 3],
		"users": {
			"1": {"name": "Joe", "age": 22},
			"2": {"name": "Ben", "age": 49},
			"3": {"name": "Sue", "age": 32},
		},
		"follows": [
			["Ben", "Sue"],
			["Joe", "Ben"],
			["Ben", "Joe"],
		],
	}
	o = pd(d)
	# Getting attributes
	assert o["total_users"] == 3
	assert o["not_exists"] is None
	assert o["users"] == {
		"1": {"name": "Joe", "age": 22},
		"2": {"name": "Ben", "age": 49},
		"3": {"name": "Sue", "age": 32},
	}
	assert o["users", "1"] == {"name": "Joe", "age": 22}
	assert o["users", "3", "name"] == "Sue"
	assert o["follows"][0] == ["Ben", "Sue"]
	# Setting attributes
	o["total_users"] = 4
	assert o["total_users"] == 4
	o["users", "3", "age"] = 99
	assert o["users", "3", "age"] == 99
	o["users", "4"] = {"name": "Ron", "age": 62}
	assert o["users", "4"] == {"name": "Ron", "age": 62}
	# Setting a deep nonexistent path creates intermediate dicts on the way down
	o["1", "1", "1", "1"] = 1
	assert o["1", "1", "1"] == {"1": 1}
	# Apply functions: assigning a callable maps it over the current value
	o["follows"] = lambda x: [list(reversed(e)) for e in x]
	assert o["follows"] == [["Sue", "Ben"], ["Ben", "Joe"], ["Joe", "Ben"]]

	assert o.get() == {
		"1": {"1": {"1": {"1": 1}}},
		"total_users": 4,
		"premium_users": [1, 3],
		"users": {
			"1": {"name": "Joe", "age": 22},
			"2": {"name": "Ben", "age": 49},
			"3": {"name": "Sue", "age": 99},
			"4": {"name": "Ron", "age": 62},
		},
		"follows": [["Sue", "Ben"], ["Ben", "Joe"], ["Joe", "Ben"]],
	}
58 |
59 |
def test_scenario_2():
	"""Int keys index into nested lists; invalid list paths raise KeyError."""
	tr = pd(
		{
			"1": {
				"date": "2018-01-01",
				"amount": 100,
				"currency": "EUR",
			},
			"2": {"date": "2018-01-02", "amount": 200, "currency": "CHF", "related": [5, {"nested": "val"}, 2, 3]},
		}
	)

	assert tr["2", "related", 1, "nested"] == "val"

	# Out-of-range list index
	with pytest.raises(KeyError):
		print(tr["2", "related", 9])
	# Descending past a leaf that is neither dict nor list
	with pytest.raises(KeyError):
		print(tr["2", "related", 0, "nested", "val"])
78 |
79 |
def test_scenario_3():
	"""Wildcard paths: sum(), chained filter() pipelines, and sum() over lists/dicts."""
	u = pd(
		{
			"1": {
				"name": "Joe",
				"currencies": ["EUR", "CHF"],
				"expenses": {
					"1": {"amount": 100, "currency": "EUR"},
					"2": {"amount": 50, "currency": "CHF"},
					"3": {"amount": 200, "currency": "EUR"},
				},
			},
			"2": {
				"name": "Ben",
				"currencies": ["EUR", "USD"],
				"expenses": {
					"1": {"amount": 3, "currency": "EUR"},
					"2": {"amount": 40, "currency": "USD"},
					"3": {"amount": 10, "currency": "USD"},
				},
			},
			"3": {
				"name": "Sue",
				"currencies": ["CHF", "USD"],
				"expenses": {
					"1": {"amount": 500, "currency": "CHF"},
					"2": {"amount": 300, "currency": "CHF"},
					"3": {"amount": 200, "currency": "USD"},
				},
			},
		}
	)

	# Wildcards can appear at any level; sum() aggregates all matched values
	assert u.at("*", "expenses", "*", "amount").sum() == 1403
	assert u.at("2", "expenses", "*", "amount").sum() == 53
	assert u.at("*", "expenses", "1", "amount").sum() == 603
	# Get sum of all expenses in EUR

	assert (
		u.deepcopy(from_root=True)
		.at("*", "expenses", "*")
		.filter(lambda v: v["currency"] == "EUR")
		.at("*", "amount")
		.sum()
		== 303
	)

	# Get all transactions in CHF except for those of sue
	assert (
		u.at("*")
		.filter(lambda x: x["name"] != "Sue")
		.at("*", "expenses", "*")
		.filter(lambda v: v["currency"] == "CHF")
		.at("*", "amount")
		.sum()
		== 50
	)

	# sum() works on lists and on dict values, but raises TypeError on a scalar
	j = pd(
		{
			"a": [1, 2],
			"b": {"c": 1, "d": 2},
			"e": 5,
		}
	)
	assert j.at("a").sum() == 3
	assert j.at("b").sum() == 3
	with pytest.raises(TypeError):
		j.at("e").sum()
149 |
--------------------------------------------------------------------------------
/path_dict/multi_path_dict.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from typing import Any, Callable
4 |
5 | from .path import Path
6 | from .path_dict import PathDict
7 |
8 |
9 | class MultiPathDict:
10 | path_handle: Path
11 | root_data: dict | list
12 |
	def __init__(self, data: dict | list, path: Path):
		"""
		Handle for a wildcard path over the given data.

		:param data: The root dict or list; kept by reference, not copied.
		:param path: A Path that may contain "*" wildcard segments.
		"""
		self.path_handle = path
		self.root_data = data
16 |
	def __repr__(self) -> str:
		# f-string "=" debug specifier prints both the attribute names and values
		return f"MultiPathDict({self.root_data = }, {self.path_handle = })"
19 |
	############################################################################
	# Getters
	# Getters ALWAYS return a value
	############################################################################

	def gather(self, as_type="list", include_paths=False) -> dict | list:
		"""
		Get the actual values at all concrete paths matched by the wildcard path.

		:param as_type: If "list", return a list of the matched values. If "dict",
			return a dict that looks like {tuple(path): value}.
		:param include_paths: If True and as_type is "list", return a list of
			(tuple(path), value) tuples instead of just values.
		:return: The gathered dict or list (a plain value, not a PathDict).
		:raises ValueError: If as_type is neither "list" nor "dict".
		"""
		if as_type not in ["list", "dict"]:
			raise ValueError("Can only return as dict or list, not both")

		handle = PathDict.from_data_and_path(self.root_data, self.path_handle)
		if as_type == "list":
			res = []
			for path in self.path_handle.expand(self.root_data):
				data = handle.at(path.path).get()
				res.append((tuple(path.path), data) if include_paths else data)
		# as_type == "dict"
		else:
			res = {}
			for path in self.path_handle.expand(self.root_data):
				res[tuple(path.path)] = handle.at(path.path).get()
		return res
49 |
50 | def gather_pd(self, as_type="list", include_paths=False) -> PathDict:
51 | data = self.gather(as_type=as_type, include_paths=include_paths)
52 | return PathDict.from_data_and_path(data, self.path_handle.copy(replace_path=[]))
53 |
54 | ############################################################################
55 | # Setters
56 | # Setters ALWAYS return a handle, not the value.
57 | ############################################################################
58 |
59 | def map(self, f: Callable) -> PathDict:
60 | """
61 | Map the result of f to the value at path previously set by ".at(path)".
62 |
63 | :return: The handle itself for further operations.
64 | """
65 | for path in self.path_handle.expand(self.root_data):
66 | PathDict.from_data_and_path(self.root_data, path).map(f)
67 | return PathDict.from_data_and_path(self.root_data, self.path_handle)
68 |
69 | def reduce(self, f: Callable, aggregate: Any, as_type="list", include_paths=False) -> Any:
70 | """
71 | Get all values of the given multi-path, and reduce them using f.
72 | """
73 | return self.gather_pd(as_type=as_type, include_paths=include_paths).reduce(f, aggregate)
74 |
75 | ############################################################################
76 | #### Filter
77 | ############################################################################
78 |
79 | def filter(self, f: Callable, as_type="list", include_paths=False) -> PathDict:
80 | """
81 | At the current path only keep the elements for which f(key, value)
82 | is True for dicts, or f(value) is True for lists.
83 | """
84 | return self.gather_pd(as_type=as_type, include_paths=include_paths).filter(f)
85 |
86 | # def filtered(self, f: Callable[[Any], bool], as_type="list", include_paths=False) -> PathDict:
87 | # raise NotImplementedError
88 |
89 | ############################################################################
90 | #### Useful shorthands
91 | ############################################################################
92 |
93 | def sum(self) -> Any:
94 | """
95 | Sum all values at the given multi-path.
96 | """
97 | return sum(self.gather())
98 |
99 | def set(self, value: Any) -> PathDict:
100 | for path in self.path_handle.expand(self.root_data):
101 | PathDict.from_data_and_path(self.root_data, path).set(value)
102 | return self
103 |
104 | ############################################################################
105 | #### Standard dict methods
106 | ############################################################################
107 |
--------------------------------------------------------------------------------
/tests/test_PDMultiHandle.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from path_dict import pd
4 | from tests import dummy_data
5 |
6 |
def test_get_all():
	"""gather() collects wildcard matches as a list, (path, value) pairs, or a dict."""
	data = {
		"a": {
			"a1": 1,
			"a2": 2,
			"a3": 3,
		},
		"b": {
			"b1": 4,
			"b2": 5,
			"b3": 6,
		},
	}

	handle = pd(data)

	# Wildcards below a missing key match nothing.
	assert handle.at("nonexistent", "*").gather() == []
	assert handle.at("*", "nonexistent", "*").gather() == []
	# assert handle.at("*", "nonexistent").gather() == []

	# A single wildcard yields the top-level values, in key order.
	assert handle.at("*").gather() == [data["a"], data["b"]]

	# A missing leaf under a wildcard yields None for that branch.
	assert handle.at("*", "a1").gather() == [1, None]
	assert handle.at("*", "*").gather() == [1, 2, 3, 4, 5, 6]

	expected_pairs = [
		(("a", "a1"), 1),
		(("a", "a2"), 2),
		(("a", "a3"), 3),
		(("b", "b1"), 4),
		(("b", "b2"), 5),
		(("b", "b3"), 6),
	]
	assert handle.at("*", "*").gather(include_paths=True) == expected_pairs
	assert handle.at("*", "*").gather(as_type="dict") == dict(expected_pairs)

	# Anything other than "list"/"dict" is rejected.
	with pytest.raises(ValueError):
		handle.at("*", "*").gather(as_type="invalid")
64 |
65 |
def test_get_all_2():
	"""A wildcard path given as one list behaves the same as varargs."""
	people = pd(
		{
			"1": {
				"name": "Joe",
				"age": 22,
				"interests": ["Python", "C++", "C#"],
			},
			"2": {
				"name": "Ben",
				"age": 49,
				"interests": ["Javascript", "C++", "Haskell"],
			},
			"3": {
				"name": "Sue",
				"age": 36,
				"interests": ["Python", "C++", "C#"],
			},
		}
	)

	# List-style path.
	assert people.at(["*", "age"]).gather() == [22, 49, 36]

	# Varargs-style path; the gathered list supports normal aggregation.
	assert sum(people.at("*", "age").gather()) == 107

	# ages_over_30 = people.at("*", "age").filtered(lambda x: x > 30)
	# print(ages_over_30)
	# assert ages_over_30 == [49, 36]

	# interests = pd["*", "interests"]
	# assert interests == [
	# 	["Python", "C++", "C#"],
	# 	["Javascript", "C++", "Haskell"],
	# 	["Python", "C++", "C#"],
	# ]
103 |
104 |
def test_get_all_3():
	"""A wildcard expands list elements, but is invalid over scalars."""
	handle = pd(
		{
			"1": [2, 3, 4],
			"2": "3",
		}
	)
	# Wildcard over a list yields its elements.
	assert handle.at("1", "*").gather() == [2, 3, 4]
	# Wildcard over a plain string is an invalid path.
	with pytest.raises(KeyError):
		handle.at("2", "*").gather()
115 |
116 |
def test_gather():
	"""gather() and map() across nested wildcard paths."""
	winners_original = pd(
		{
			"2017": {
				"podium": {
					"17-place-1": {"name": "Joe", "age": 22},
					"17-place-2": {"name": "Ben", "age": 13},
					"17-place-3": {"name": "Sue", "age": 98},
				},
				"prices_list": ["Car", "Bike", "Plane"],
			},
			"2018": {
				"podium": {
					"18-place-1": {"name": "Bernd", "age": 50},
					"18-place-2": {"name": "Sara", "age": 32},
					"18-place-3": {"name": "Jan", "age": 26},
				},
				"prices_list": ["Beer", "Coffee", "Cigarette"],
			},
		}
	)

	# Names of all winners across both years, in path order.
	winners = winners_original.deepcopy(from_root=True)
	assert winners.at("*", "podium", "*", "name").gather() == ["Joe", "Ben", "Sue", "Bernd", "Sara", "Jan"]

	# Increment every age in place via map().
	winners = winners_original.deepcopy(from_root=True)
	winners.at("*", "podium", "*", "age").map(lambda age: age + 1)
	expected_ages = {
		("2017", "17-place-1"): 23,
		("2017", "17-place-2"): 14,
		("2017", "17-place-3"): 99,
		("2018", "18-place-1"): 51,
		("2018", "18-place-2"): 33,
		("2018", "18-place-3"): 27,
	}
	for (year, place), age in expected_ages.items():
		assert winners[year, "podium", place, "age"] == age

	# A concrete prefix followed by a wildcard still gathers correctly.
	assert winners.at("2017", "podium", "*", "name").gather() == ["Joe", "Ben", "Sue"]
155 |
156 |
def test_sum():
	"""sum() adds numeric leaves; non-numeric leaves raise TypeError."""
	handle = pd(
		{
			"1": {"a": 1, "b": [1]},
			"2": {"a": 3, "b": [1]},
		}
	)
	assert handle.at("*", "a").sum() == 4
	# Summing lists onto the int start value 0 is a TypeError.
	with pytest.raises(TypeError):
		handle.at("*", "b").sum()
167 |
168 |
def test__repr__():
	"""repr() of a MultiPathDict shows the root data and the wildcard path."""
	expected = "MultiPathDict(self.root_data = {}, self.path_handle = Path(path=['*'], raw=False))"
	assert str(pd({}).at("*")) == expected
172 |
173 |
def test_set():
	"""set() and __setitem__ on a wildcard path write to every matched location."""
	db = {
		"meta": {
			"version": 1,
		},
		"users": {
			"1": {
				"name": "John",
				"age": 20,
				"friends": ["2", "3"],
			},
			"2": {
				"name": "Jane",
				"age": 21,
				"friends": ["1", "3"],
			},
			"3": {
				"name": "Jack",
				"age": 22,
				"friends": ["1", "2"],
			},
		},
	}
	handle = pd(db).deepcopy()
	# Clear every friends list at once.
	handle.at("users", "*", "friends").set([])
	# Wildcard assignment creates the key on every user.
	handle["users", "*", "blip"] = "blap"
	for user_id in ("1", "2", "3"):
		assert handle["users", user_id, "friends"] == []
		assert handle["users", user_id, "blip"] == "blap"
206 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | 
3 |
4 | [](https://pepy.tech/project/path-dict)
5 | 
6 | 
7 |
8 |
9 | Why do I need this?
10 | ================================================================================
11 |
12 | Do you work with dicts a lot, but you also wish that they could do more?
13 | Then PathDict is for you!
14 |
15 | Let's look at this dict:
16 |
17 | ```python
18 | users = {
19 | "u1": {
20 | "name": "Julia",
21 | "age": 32,
22 | "interests": ["soccer", "basketball"],
23 | },
24 | "u2": {
25 | "name": "Ben",
26 | "age": 26,
27 | "interests": ["airplanes", "alternative music"],
28 | }
29 | }
30 | ```
31 |
32 | With PathDict, you can do things like:
33 |
34 | ```python
35 | users = PathDict(users)
36 |
37 | # Get all user names
38 | users["*", "name"] # -> ["Julia", "Ben"]
39 |
40 | # Add new post to the current_user's posts
41 | new_post = {"title": ...}
42 | users[current_user.id, "posts"] = lambda x: (x or []) + [new_post] # Key "posts" is automatically created!
43 |
44 |
45 | # Increase age of Julia
46 | users["u1", "age"] = 33
47 |
48 | # Append interest "cooking" to all users
49 | users["*", "interests"] = lambda interests: interests + ["cooking"]
50 |
51 |
52 | # Remove all interests of Ben which do not start with "a" ("cooking is removed")
53 | users.filter("u2", "interests", f=lambda interest: not interest.startswith("a"))
54 |
55 | # Remove users that are younger than 30
56 | users.filter(f=lambda id, user: user["age"] >= 30)
57 | ```
58 |
59 | **Pretty neat, right?**
60 |
61 |
62 |
63 | Second Example
64 | --------------------------------------------------------------------------------
65 |
66 | Consider the following dict filled with users. Notice how Bob has
67 | provided sports interests only, and Julia has provided music interests only.
68 | ```python
69 | db = {
70 | "bob": {
71 | "interests": {
72 | "sports": ["soccer", "basketball"]
73 | }
74 | },
75 | "julia": {
76 | "interests": {
77 | "music": ["pop", "alternative"]
78 | }
79 | }
80 | }
81 | ```
82 |
83 | Let's print the music interests of each user using normal dicts:
84 |
85 | ```python
86 | for user_name in db:
87 | user_music = None
88 | if user_name in db:
89 | if "interests" in db[user_name]:
90 | if "music" in db[user_name]["interests"]:
91 | user_music = db[user_name]["interests"]["music"]
92 | print(user_music)
93 |
94 | # ---> None
95 | # ---> ["pop", "alternative"]
96 | ```
97 |
98 | **Annoying, right?** This is how we do it with a PathDict:
99 |
100 | ```python
101 | db = PathDict(db)
102 | for user_name in db:
103 | print(db[user_name, "interests", "music"])
104 |
105 | # ---> None
106 | # ---> ["pop", "alternative"]
107 |
108 | ```
109 |
110 | **Much better!** If any of the keys do not exist, it will not throw an error,
111 | but return `None`.
112 |
113 | If we tried this with a normal dict, we would have gotten a `KeyError`.
114 |
115 | The same also works for setting values, if the path does not exist, it will be
116 | created.
117 |
118 | Installation
119 | ================================================================================
120 |
121 | `pip3 install path-dict`
122 |
123 | ```python
124 | from path_dict import PathDict
125 | ```
126 |
127 | Usage
128 | ================================================================================
129 |
130 | PathDict subclasses [collections.UserDict](https://docs.python.org/3/library/collections.html#collections.UserDict),
131 | so it behaves almost like a normal Python dict, but comes with some handy extras.
132 |
133 | ## Initialize
134 |
135 | ```python
136 | # Empty PathDict
137 | pd = PathDict()
138 |
139 | > pd
140 | ---> PathDict({})
141 | ```
142 |
143 | A PathDict keeps a reference to the original initializing dict:
144 |
145 | ```python
146 | user = {
147 | "name": "Joe",
148 | "age": 22,
149 | "hobbies": ["Playing football", "Podcasts"]
150 | "friends": {
151 | "Sue": {"age": 30},
152 | "Ben": {"age": 35},
153 | }
154 | }
155 | joe = PathDict(user)
156 | > joe == user
157 | ---> True
158 | > joe.dict is user
159 | ---> True
160 | ```
161 |
162 | You can also get a deep copy:
163 |
164 | ```python
165 | joe = PathDict(user, copy=True)
166 | > joe == user
167 | ---> True
168 | > joe.dict is user
169 | ---> False
170 | ```
171 |
172 | ## Getting and setting values with paths
173 |
174 | You can use paths of keys to access values:
175 |
176 | ```python
177 | joe = PathDict(user, copy=True)
178 |
179 | # Get existing path
180 | > joe["friends", "Sue", "age"]
181 | ---> 30
182 |
183 | # Get non-existent, but valid path
184 | > joe["friends", "Josef", "age"]
185 | ---> None
186 |
187 | # Set non-existent, but valid path, creates keys
188 | joe["authentification", "password"] = "abc123"
189 | > joe["authentification"]
190 | ---> PathDict({"password": "abc123"})
191 | ```
192 |
193 | Using invalid paths to get or set a value will result in an error. An invalid path is a path that tries to access a key of an int or list, for example. So, only use paths to access hierarchies of PathDicts.
194 |
195 |
196 | ```python
197 | joe = PathDict(user, copy=True)
198 |
199 | # Get invalid path (joe["hobbies"] is a list)
200 | > joe["hobbies", "not_existent"]
201 | ---> Error!
202 | ```
203 |
204 |
205 |
206 | ## Most dict methods are supported
207 |
208 | Many of the usual dict methods work with PathDict:
209 |
210 | ```python
211 | pathdict = ...
212 |
213 | for key, value in pathdict.items():
214 | ...
215 |
216 | for key in pathdict:
217 | ...
218 |
219 | for key in pathdict.keys():
220 | ...
221 |
222 | for value in pathdict.values():
223 | ...
224 |
225 | ```
226 |
227 | ## Apply a function at a path
228 |
229 | When setting a value, you can use a lambda function to modify the value at a given path.
230 | The function should take one argument and return the modified value.
231 |
232 |
233 | ```python
234 | stats_dict = {}
235 | stats_pd = PathDict({})
236 |
237 | # Using a standard dict:
238 | if "views" not in stats_dict:
239 | stats_dict["views"] = {}
240 | if "total" not in stats_dict["views"]:
241 | stats_dict["views"]["total"] = 0
242 | stats_dict["views"]["total"] += 1
243 |
244 | # Using a PathDict:
245 | stats_pd["views", "total"] = lambda x: (x or 0) + 1
246 | ```
247 |
248 | ## Filtering
249 |
250 | PathDicts offer a filter function, which can filter a list or a PathDict at a given path in-place.
251 |
252 | To filter a list, pass a function that takes one argument (e.g. `lambda ele: ele > 10`) and returns True if the value should be kept, else False.
253 | To filter a PathDict, pass a function that takes two arguments (eg. `lambda key, value: key != "1"`) and returns True if the key-value pair should be kept, else False.
254 |
255 | You can filter the PathDict filter is called on, or you can also pass a path into the filter to apply the filter at a given path.
256 |
257 | A filtered function is also offered, which does the same, but returns a filtered copy instead of filtering in-place.
258 |
259 |
260 | ```python
261 | joe = PathDict(user, copy=True)
262 |
263 | # Remove all friends that are older than 33.
264 | joe.filter("friends", f=lambda k, v: v["age"] <= 33)
265 |
266 | > joe["friends"]
267 | ---> PathDict({
268 | "Sue": {"age": 30}
269 | })
270 | ```
271 |
272 | ## Aggregating
273 |
274 | The aggregate function can combine a PathDict to a single aggregated value.
275 | It takes an init parameter, and a function which takes three arguments (e.g. `lambda key, val, agg`).
276 |
277 | ```python
278 | joe = PathDict(user, copy=True)
279 |
280 | # Sum of ages of all friends of joe
281 | friend_ages = joe.aggregate("friends", init=0, f=lambda k, v, a: a + v["age"])
282 |
283 | > friend_ages
284 | ---> 65
285 | ```
286 |
287 | ## Serialize to JSON
288 |
289 | To serialize a PathDict to JSON, call `json.dumps(.dict)`.
290 | If you try to serialize a PathDict object itself, the operation will fail.
291 |
292 |
293 |
294 | # Reference
295 |
296 |
297 | ### pd(data: dict | list, raw=False) -> PathDict
298 |
299 | Creates and returns a handle on the given data.
300 |
301 | Args:
302 | - `data` - Must be a list or dict.
303 | - `raw` - If `True`, do not interpret paths. So wildcards (`*`) are interpreted as a usual key, and tuples will be interpreted as keys as well.
304 |
305 | Returns:
306 | - A handle that references the root of the given data dict or list.
307 |
308 |
309 | ## PathDict
310 |
311 | ### copy(self, from_root=False) -> PathDict
312 |
313 | Return a deep copy of the data at the current path or from the root.
314 |
315 | Args:
316 | - `from_root` - If `True`, the copy will be made from the root data, not from where the current path is. The path handle will be at the same location as it was in the original. If `False`, only the part of the data where the current path handle points will be copied.
317 |
318 | Returns:
319 | - A handle on the newly created copy
320 |
321 | The current path handle will stay the same.
322 |
--------------------------------------------------------------------------------
/tests/test_PDHandle.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from path_dict import pd
4 | from tests import dummy_data
5 |
6 |
def test_initialization():
	"""pd() accepts dicts and lists, keeps them by reference, rejects other types."""
	# An empty dict round-trips.
	assert pd({}).get() == {}

	# Anything that is not a dict or list is rejected.
	for invalid in ("", 1, 1.0, True, None, (), set()):
		with pytest.raises(TypeError):
			pd(invalid)

	# The handle references the original data, it does not copy it.
	init_dict = dummy_data.get_users()
	handle = pd(init_dict)
	assert handle.data is init_dict
	assert handle is not init_dict
	assert handle.get() == init_dict
	assert handle["users"] is init_dict["users"]
	assert isinstance(handle["premium_users"], list)
	assert handle["premium_users"] is init_dict["premium_users"]
25 |
26 |
def test_at():
	"""at() normalizes empty, string, varargs and list paths to a list of keys."""
	db = dummy_data.get_db()
	cases = [
		((), []),
		(("",), []),
		(([],), []),
		(("users",), ["users"]),
		((["users"],), ["users"]),
		(("users", "1"), ["users", "1"]),
		(("users", "1", "friends"), ["users", "1", "friends"]),
		((["users", "1", "friends"],), ["users", "1", "friends"]),
	]
	for args, expected_path in cases:
		assert pd(db).at(*args).path_handle.path == expected_path
37 |
38 |
def test_at_parent():
	# at_parent() moves the handle one path segment up.
	assert pd(dummy_data.get_db()).at("users", "1").at_parent().path_handle.path == ["users"]
	# NOTE(review): missing `assert` — the line below is a no-op expression and
	# checks nothing. Confirm what at_parent() on the root should return, then
	# turn it into an assertion.
	pd(dummy_data.get_db()).at_parent().get() is None
42 |
43 |
def test_at_children():
	"""at_children() targets every direct child of the current path."""
	db = dummy_data.get_db()
	expected_children = list(db["users"].values())
	assert pd(db).at("users").at_children().gather() == expected_children
47 |
48 |
def test_simple_get():
	"""get() returns the value at the path, or None / a default when missing."""
	db = dummy_data.get_db()

	# An empty path (no args, "", or []) refers to the root.
	for root_handle in (pd(db), pd(db).at(""), pd(db).at()):
		assert root_handle.get() == db

	# Existing and missing-but-valid paths.
	assert pd(db).at("users", "1", "name").get() == "John"
	assert pd(db).at("users", "9", "name").get() is None
	assert pd(db).at(2).get() is None

	# An explicit default is returned for missing paths.
	assert pd(db).at("users", "9", "name").get("default") == "default"
58 |
59 |
def test_referencing():
	"""Handles on the same underlying object share it; deepcopy() breaks the link."""
	j_table = {
		"j1": {},
		"j2": {},
		"j3": {},
		"j4": {},
	}

	# Two handles agree on identity exactly when they wrap the same entry.
	for a in j_table:
		for b in j_table:
			first, second = pd(j_table[a]).get(), pd(j_table[b]).get()
			if a == b:
				assert first is second
			else:
				assert first is not second

	shared_dict = {}
	first_handle = pd(shared_dict)
	second_handle = pd(shared_dict)
	assert first_handle.data is second_handle.data

	# A deep copy owns its own data.
	deep_copy = pd(shared_dict).deepcopy()
	assert deep_copy.get() is not first_handle.get()
81 |
82 |
def test__repr__():
	"""repr() shows the JSON-formatted data and the current path handle."""
	expected = 'PathDict({\n    "1": 2\n}, self.path_handle = Path(path=[], raw=False))'
	assert str(pd({"1": 2})) == expected
86 |
87 |
def test_reset_at_after_in():
	"""A membership test must not permanently move the handle away from the root."""
	data = {"1": {"2": 3}}
	handle = pd(data)
	assert "1" in handle
	# get() still returns the whole dict, so `in` reset the path handle.
	assert handle.get() == data
93 |
94 |
def test_deepcopy():
	"""deepcopy() detaches the copied data from the original at every level."""
	j = {"1": {"2": 3}}
	# Sanity check: without copying, get() returns the referenced sub-dict itself.
	assert pd(j).at("1").get() is j["1"]

	# deepcopy at root: equal content, distinct object
	assert pd(j).deepcopy().get() == j
	assert pd(j).deepcopy().get() is not j

	# deepcopy at path: only the subtree is copied
	assert pd(j).at("1").deepcopy().get() is not j["1"]
	assert pd(j).at("1").deepcopy().get() == j["1"]
	assert pd(j).deepcopy().at("1").get() is not j["1"]

	# deepcopy from root at path: whole data copied, copy's handle is at the root
	assert pd(j).at("1").deepcopy(from_root=True).at().get() == j
	assert pd(j).at("1").deepcopy(from_root=True).at().get() is not j
	assert pd(j).at("1").deepcopy(from_root=True).get() == j

	# Nested: a copy of a copy is a fresh handle on fresh data
	users = dummy_data.get_users()
	dc_pd = pd(users).deepcopy()
	assert dc_pd.data is not users
	dc_pd_copy = dc_pd.deepcopy()
	assert dc_pd is not dc_pd_copy
119 |
120 |
def test_copy():
	"""copy() is shallow: the copied container is new, nested values stay shared."""
	j = {"1": {"2": 3}}
	# Sanity check: without copying, get() returns the referenced sub-dict itself.
	assert pd(j).at("1").get() is j["1"]

	# copy at root: equal content, distinct top-level container
	assert pd(j).copy().get() == j
	assert pd(j).copy().get() is not j

	# copy at path
	assert pd(j).at("1").copy().get() is not j["1"]
	assert pd(j).at("1").copy().get() == j["1"]
	# Shallow: the nested dict behind a root copy is still the same object.
	assert pd(j).copy().at("1").get() is j["1"]

	assert pd(j).at("1").copy(from_root=True).at().get() == j

	# Nested: a copy of a copy is a fresh handle
	users = dummy_data.get_users()
	dc_pd = pd(users).copy()
	assert dc_pd.data is not users
	dc_pd_copy = dc_pd.copy()
	assert dc_pd is not dc_pd_copy
140 |
141 |
def test_contains():
	# __contains__ accepts single keys and key paths (given as lists).
	users_dict = dummy_data.get_users()
	users_pd = pd(users_dict)
	assert "total_users" in users_pd
	assert ["premium_users", 1] in users_pd
	# NOTE(review): the two lines below parse as `assert "premium_users", ("1" in users_pd)`
	# — an assert with a message — so they always pass, and they are duplicated.
	# Probably `assert ["premium_users", "1"] in users_pd` was intended; confirm
	# the intended path and fix.
	assert "premium_users", "1" in users_pd
	assert "premium_users", "1" in users_pd
	assert ["premium_users", "44"] not in users_pd
	assert ["users", "1"] in users_pd
	assert ["users", "999999"] not in users_pd
	assert ["users", "1", "name"] in users_pd
	assert ["users", "999999", "name"] not in users_pd
	assert ["users", "1", "name", "joe"] not in users_pd
	assert ["users", "1", "name", "joe", "Doe"] not in users_pd  # too many paths
156 |
157 |
def test_nested_object_copy():
	"""true_deepcopy copies arbitrary objects; the fast default copies only containers."""
	# Test copy with a non-container object stored inside the dict
	class TestObject:
		def __init__(self, data):
			self.data = data

		def __repr__(self):
			return f"TestObject({self.data})"

	o = pd({})
	o["test", "test"] = TestObject({"1": "2"})
	assert str(o.get()) == """{'test': {'test': TestObject({'1': '2'})}}"""

	# True deepcopy (copy.deepcopy)
	tdc = o.deepcopy(true_deepcopy=True)
	# The copy has the same str representation
	assert str(tdc.get()) == str(o.get())
	# It is still a TestObject
	assert type(tdc.at("test", "test").get()) == TestObject
	# But not the same object
	assert tdc.at("test").get() is not o.at("test").get()
	assert tdc.at("test", "test").get() is not o.at("test", "test").get()

	# Fast deepcopy (default): copies dict/list containers only
	fdc = o.at().deepcopy()
	# The copy has the same str representation
	assert str(fdc.get()) == str(o.get())
	# It is still a TestObject
	assert type(fdc.at("test", "test").get()) == TestObject
	# The containers are copied, but the leaf object is shared with the original
	assert fdc.at("test").get() is not o.at("test").get()
	assert fdc.at("test", "test").get() is o.at("test", "test").get()
190 |
191 |
def test_get_path():
	"""__getitem__ with key paths: missing-but-valid gives None, invalid raises."""
	users_dict = dummy_data.get_users()
	users_pd = pd(users_dict)
	assert users_pd["total_users"] == 3
	assert users_pd["users", "1", "name"] == "Joe"
	# Non existent but correct paths return None
	assert users_pd["users", "-1", "name"] is None

	assert users_pd[2] is None
	# If value is not a dict, return that value
	assert isinstance(users_pd["follows"], list)
	# Dict values are returned by reference (the very same object)
	assert users_pd["users"] is users_dict["users"]
	# Wrong path accesses, eg. get key on list, raise an exception
	with pytest.raises(KeyError):
		print(users_pd["follows", "not_correct"])
	# Lists can be indexed by integer inside a path
	assert users_pd[["follows", 0]] == ["Ben", "Sue"]

	# A wildcard in __getitem__ gathers all matched values
	assert users_pd["users", "1", "*"] == ["Joe", 22]
212 |
213 |
def test_set_path():
	"""set() replaces values and creates intermediate dicts, but not inside lists."""
	# A list root can be replaced wholesale.
	assert pd(["1", 2]).set([3, "4"]).get() == [3, "4"]

	j = {"u1": "Ben", "u2": "Sue"}
	p = pd(j)

	# NOTE(review): missing `assert` — this line is a no-op expression and checks
	# nothing. Confirm that set(None) leaves the data unchanged, then assert it.
	p.set(None).get() == j

	# Replace entire dict
	p.at().set({"u3": "Joe"})
	assert j == {"u3": "Joe"}

	# Cover specific KeyError
	with pytest.raises(KeyError):
		p.at("u3", "meta", "age").set(22)

	# The root may only be replaced by a dict or list
	with pytest.raises(TypeError):
		p.at().set("Not Allowed")

	# List elements cannot be addressed by non-integer keys
	p = pd({"l1": [1, 2, 3]})
	with pytest.raises(KeyError):
		p.at(["l1", "nonexistent"]).set(4)
	with pytest.raises(KeyError):
		p.at(["l1", "nonexistent", "also_nonexistent"]).set(4)
238 |
239 |
def test_map():
	"""map() transforms a value in place and creates missing paths for the result."""
	data = {"1": {"2": 3}}
	assert pd(data).at("1", "2").map(lambda value: value + 1).get() == 4
	# Mapping a missing path creates it; (value or 0) handles the initial None.
	assert pd(data).at("1", "6", "7").map(lambda value: (value or 0) + 1).get() == 1
	assert pd(data).at("1", "6", "7").map(lambda value: (value or 0) + 1).get() == 2
	# The original dict was mutated in place.
	assert data["1"]["2"] == 4
	assert data["1"]["6"]["7"] == 2
	# Without a None guard, mapping a missing path raises.
	with pytest.raises(TypeError):
		pd(data).at("1", "99", "99").map(lambda value: value + 1)
249 |
250 |
def test_mapped():
	"""mapped() operates on a deep copy and leaves the original untouched."""
	source = {"1": {"2": 3}, "a": {"b": "c"}}

	# mapped() copies first, then maps; map() on an explicit deepcopy is equivalent.
	via_mapped = pd(source).at("1", "2").mapped(lambda x: x + 1).at().get()
	via_deepcopy = pd(source).deepcopy().at("1", "2").map(lambda x: x + 1).at().get()

	# The original is unchanged, both copies carry the mapped value.
	assert source["1"]["2"] == 3
	assert via_mapped["1"]["2"] == 4
	assert via_deepcopy["1"]["2"] == 4

	# Untouched branches are equal but not identical (deep copy).
	assert source["a"] == via_mapped["a"]
	assert source["a"] is not via_mapped["a"]
263 |
264 |
def test_append():
	"""append() creates a list at a missing path, but refuses non-list values."""
	handle = pd({})
	handle.at("1", "2").append(3)
	assert handle.at().get() == {"1": {"2": [3]}}
	# Appending where a dict already lives is an error.
	with pytest.raises(TypeError):
		handle.at("1").append(2)
271 |
272 |
def test_update():
	"""update() merges keys like dict.update, overwriting existing ones."""
	handle = pd({"a": 1})
	handle.update({"b": 2, "a": 2})
	assert handle.get() == {"a": 2, "b": 2}
277 |
278 |
def test_filter_behavior_spec():
	"""filter() mutates the data in place at the current path."""
	j = {
		"a": "b",
		"1": {
			"2": "20",
			"3": "30",
			"4": "40",
		},
	}
	p = pd(j)
	# Keep only entries of j["1"] whose key is greater than 3.
	p.at("1").filter(lambda k, v: int(k) > 3)

	# The original dict was modified in place, and the handle stayed at "1".
	assert j == {"a": "b", "1": {"4": "40"}}
	assert p.get() == {"4": "40"}
	assert p.at().get() == {"a": "b", "1": {"4": "40"}}

	# Filtering a scalar (non dict/list) value is not allowed.
	with pytest.raises(TypeError):
		p.at("a").filter(lambda x: x)
297 |
298 |
def test_filter():
	"""filtered() returns a filtered copy; dicts filter by (key, value), lists by element."""
	users_pd = pd(dummy_data.get_users())

	# Dict filter: f receives (key, value).
	users_below_30 = users_pd.deepcopy().at("users").filtered(lambda k, v: v["age"] <= 30)
	assert users_below_30.get() == {"1": {"age": 22, "name": "Joe"}}

	premium_users = users_pd.deepcopy().at("users").filtered(lambda k, v: int(k) in users_pd["premium_users"])
	assert premium_users.get() == {"1": {"age": 22, "name": "Joe"}, "3": {"age": 32, "name": "Sue"}}

	# List filter: f receives the element only.
	follows_includes_joe = users_pd.at("follows").filtered(lambda e: "Joe" in e)
	assert isinstance(follows_includes_joe.get(), list)
	assert follows_includes_joe.get() == [
		["Joe", "Ben"],
		["Ben", "Joe"],
	]
314 |
315 |
def test_reduce():
	"""reduce() folds values into an aggregate; dicts pass (key, value, agg), lists (value, agg)."""
	users_pd = pd(dummy_data.get_users())

	# Dict reduce: f receives (key, value, aggregate).
	users_ages = users_pd.at("users").reduce(lambda k, v, a: a + v["age"], aggregate=0)
	assert users_ages == 103

	# List reduce: f receives (value, aggregate).
	p = pd({"l1": [1, 2, 3]})
	assert p.at("l1").reduce(lambda v, a: a + v, aggregate=10) == 16

	# Reducing a scalar (non dict/list) value is not allowed.
	p = pd({"l1": "abc"})
	with pytest.raises(TypeError):
		p.at("l1").reduce(lambda v, a: a + v, aggregate=0)

	# reduce() also works on wildcard multi-paths.
	p = pd(dummy_data.get_users())
	assert p.at("users", "*", "name").reduce(lambda v, a: a + [v], aggregate=[]) == ["Joe", "Ben", "Sue"]
331 |
332 |
def test_keys():
	"""keys() works on dict values; lists have no keys."""
	handle = pd({"1": {"2": [3]}})
	assert handle.keys() == ["1"]
	assert handle.at("1").keys() == ["2"]
	# A list has no .keys()
	with pytest.raises(AttributeError):
		handle.at("1", "2").keys()
339 |
340 |
def test_values():
	"""values() works on dict values; lists have no values()."""
	handle = pd({"1": {"2": [3]}})
	assert handle.values() == [{"2": [3]}]
	assert handle.at("1").values() == [[3]]
	# A list has no .values()
	with pytest.raises(AttributeError):
		handle.at("1", "2").values()
347 |
348 |
def test_items():
	"""items() works on dict values; lists have no items()."""
	handle = pd({"1": {"2": [3]}})
	assert list(handle.items()) == [("1", {"2": [3]})]
	assert list(handle.at("1").items()) == [("2", [3])]
	# A list has no .items()
	with pytest.raises(AttributeError):
		handle.at("1", "2").items()
355 |
356 |
def test__len__():
	"""len() reflects the value at the current path, not the root."""
	handle = pd({"1": {"2": [3, 1]}})
	assert len(handle) == 1
	assert len(handle.at("1")) == 1
	assert len(handle.at("1", "2")) == 2
362 |
363 |
def test_pop():
	"""pop() removes and returns a key; missing keys give None or the default."""
	handle = pd({"1": {"2": [3, 1]}, "4": 4})
	assert handle.pop("1") == {"2": [3, 1]}
	assert handle.get() == {"4": 4}
	# Popping a missing key returns None, or the provided default.
	assert handle.pop("1") is None
	assert handle.pop("1", 2) == 2
370 |
371 |
def test_iter():
	"""Iterating a PathDict handle yields the keys of the underlying dict in order."""
	handle = pd({"a": 1, "b": 2, "c": 3})
	# list() drives the same __iter__ protocol the original manual loop used,
	# and asserts the key order in one step.
	assert list(handle) == ["a", "b", "c"]
379 |
--------------------------------------------------------------------------------
/path_dict/path_dict.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import copy
4 | import json
5 | from typing import Any, Callable, Union
6 |
7 | from . import utils
8 | from .path import Path
9 |
10 |
11 | class PathDict:
12 | root_data: dict | list | Any
13 | data: dict | list | Any
14 | path_handle: Path
15 |
	def __init__(self, data: dict | list, raw=False, path: Path | None = None):
		"""
		A PathDict always refers to a dict or list.
		It is used to get data or perform operations at a given path.
		When initialized, the current path is the root path.

		:param data: The dict or list to wrap. Kept by reference, not copied.
		:param raw: If True, paths are not interpreted ("*" is treated as a normal key).
		:param path: Optional pre-built Path handle; defaults to a fresh root path.
		:raises TypeError: If data is neither a dict nor a list.
		"""
		if not isinstance(data, (dict, list)):
			raise TypeError(f"PathDict init: data must be dict or list but is {type(data)} " f"({data})")
		self.data = data
		self.path_handle = Path([], raw=raw) if path is None else path
26 |
	@classmethod
	def from_data_and_path(cls, data: dict | list, path: Path) -> PathDict:
		"""
		Alternative constructor: wrap the given data and attach an existing
		Path handle as-is (unlike __init__, no fresh root path is built).

		:param data: The dict or list to wrap. Kept by reference, not copied.
		:param path: The Path handle the new PathDict starts at.
		"""
		return cls(data=data, path=path)
36 |
37 | def __repr__(self) -> str:
38 | return f"PathDict({json.dumps(self.data, indent=4, sort_keys=True)}, {self.path_handle = })"
39 |
	def deepcopy(self, from_root=False, true_deepcopy=False) -> PathDict:
		"""
		Return a deep copy of the data at the current path or from the root.

		Args:
		- `from_root` - If `True`, the copy is made from the root data, not from where the current path is. If `False`, only the part of the data where the current path handle points is copied. In both cases the copy's path handle starts at the root of the copied data.
		- `true_deepcopy` - If True, the copy will be made using `copy.deepcopy`. If False, the copy recursively copies dicts, lists, tuples and sets, but will not copy other types of objects.

		Returns:
		- A handle on the newly created copy
		"""
		# The copy always gets a fresh (root) path handle.
		path = self.path_handle.copy(replace_path=[])
		data = self.data if from_root else self.get()
		data_copy = copy.deepcopy(data) if true_deepcopy else utils.fast_deepcopy(data)
		return PathDict.from_data_and_path(data_copy, path)
55 |
	def copy(self, from_root=False) -> PathDict:
		"""
		Return a shallow copy of the data at the current path or from the root.

		Args:
		- `from_root` - If `True`, the copy is made from the root data, not from where the current path is. If `False`, only the part of the data where the current path handle points is copied. In both cases the copy's path handle starts at the root of the copied data.

		Returns:
		- A handle on the newly created copy
		"""
		# The copy always gets a fresh (root) path handle.
		path = self.path_handle.copy(replace_path=[])
		data_copy = copy.copy(self.data if from_root else self.get())
		return PathDict.from_data_and_path(data_copy, path)
69 |
70 | ############################################################################
71 | # Moving the handle
72 | ############################################################################
73 |
74 | def at(self, *path, raw=None) -> Union[PathDict, MultiPathDict]:
75 | """
76 | Calling at(path) moves the handle to the given path, and returns the
77 | handle.
78 |
79 | A path can be a string, a list or a tuple. For example, the following
80 | are equivalent:
81 | >>> d = {"a": {"b": {"c": 1}}}
82 | >>> pd(d).at(["a", "b", "c"]).get() # -> 1
83 | >>> pd(d).at("a", "b", "c").get() # -> 1
84 |
85 | The path can also contain wildcards (*) to select everything at a given
86 | level, and [a|b|c] to select multiple keys at a given level.
87 | In this case, the result is a MultiPathDict, which can perform
88 | operations on all the selected elements at once.
89 |
90 | :param path: The path to move to.
91 | :param raw: If True, the path is not parsed, and is used as is. For
92 | example, "*" will not be interpreted as a wildcard, but as a usual key.
93 | """
94 |
95 | raw = self.path_handle.raw if raw is None else raw
96 | self.path_handle = Path(*path, raw=raw)
97 |
98 | if self.path_handle.has_wildcards:
99 | return MultiPathDict(self.data, self.path_handle)
100 | return self
101 |
102 | def at_root(self) -> PathDict:
103 | """
104 | Move the handle back to the root of the data and return it.
105 | Equivalent to
106 | >>> .at()
107 |
108 | Useful if you are in a nested handle but also want to do something on the root data.
109 |
110 | Example:
111 | >>> d = {"a": {"b": {"c": 1}}}
112 | >>> pd(d).at("a", "b").filter(lambda k,v: v > 1).root().filter(lambda k,v: k == "a").get()
113 | """
114 | return self.at()
115 |
116 | def at_parent(self) -> PathDict:
117 | """
118 | Move the handle to the parent of the current path and return it.
119 | """
120 | return self.at(self.path_handle.path[:-1])
121 |
122 | def at_children(self) -> MultiPathDict:
123 | """
124 | Return a MultiPathDict that refers to all the children of the current
125 | path.
126 | """
127 | return self.at(self.path_handle.path + ["*"])
128 |
129 | ############################################################################
130 | # Getters
131 | # Getters ALWAYS return actual values, not handles.
132 | ############################################################################
133 |
134 | def get(self, default=None) -> dict | list | Any:
135 | """
136 | Get the actual value at the given path.
137 | If the path is vaild but does not exist, return None (default).
138 | If the path is invalid (e.g. accessing string key on list), raise an
139 | error.
140 |
141 | Example:
142 | >>> d = {"a": {"b": {"c": [1]}}}
143 | >>> pd(d).at("a", "b", "c").get() # -> [1] (Valid path)
144 | >>> pd(d).at("a", "b", "d").get() # -> None (Valid path, but does not exist)
145 | >>> pd(d).at("a", "b", "c", "d").get() # -> KeyError (Invalid path - cannot get key "d" on a list)
146 | >>> pd(d).at("a", "b", "c", "0").get() # -> 1 (Valid path)
147 |
148 | Shorthand syntax:
149 | >>> pd(d).at("a", "b", "c")[:]
150 | You can use [:] to get the value at the current path.
151 | Beware: using the subscript [...] will move the handle back to the root
152 | of the data.
153 |
154 | :param default: The default value to return if the path is valid but
155 | does not exist.
156 | """
157 | # Iterate over the path to safely get the value
158 | current = self.data
159 | for key in self.path_handle:
160 | current = utils.guarded_get(current, key)
161 | if current is None:
162 | return default
163 | return current
164 |
165 | ############################################################################
166 | # Setters
167 | # Setters ALWAYS return a handle, not the value.
168 | ############################################################################
169 |
170 | def set(self, value) -> PathDict:
171 | # Setting nothing is a no-op
172 | if value is None:
173 | return self
174 |
175 | # If handle is at root, replace the whole data
176 | if len(self.path_handle) == 0:
177 | if isinstance(self.data, dict) and isinstance(value, dict):
178 | self.data.clear()
179 | self.data.update(value)
180 | return self
181 | if isinstance(self.data, list) and isinstance(value, list):
182 | self.data.clear()
183 | self.data.extend(value)
184 | return self
185 | raise TypeError(
186 | "PathDict set: At the root level, you can only set dict dict or"
187 | f"list to a list (tried to set a {type(self.data)} to a "
188 | f"{type(value)})."
189 | )
190 |
191 | # Iterate over all keys except the last one
192 | current = self.data
193 | for key in self.path_handle[:-1]:
194 | current = utils.guarded_descent(current, key)
195 |
196 | key = self.path_handle[-1]
197 | if isinstance(current, dict):
198 | current[key] = value
199 | elif isinstance(current, list):
200 | try:
201 | current[int(key)] = value
202 | except (ValueError, IndexError, TypeError) as e:
203 | raise KeyError(f"PathDict set: invalid path {self.path_handle}") from e
204 |
205 | return self
206 |
207 | def map(self, f: Callable) -> PathDict:
208 | """
209 | Map the result of f to the value at path previously set by ".at(path)".
210 |
211 | :return: The handle itself for further operations.
212 | """
213 | self.set(f(self.get()))
214 | return self
215 |
216 | def mapped(self, f: Callable) -> PathDict:
217 | """
218 | Makes a fast deepcopy of your root data, moves the handle to the previously
219 | set path, applies map with f at that path, and returns the handle.
220 | """
221 | current_handle = self.path_handle
222 | return self.deepcopy(from_root=True).at(current_handle.path).map(f)
223 |
224 | ############################################################################
225 | # Filter
226 | # Filter ALWAYS return a handle, not the value.
227 | ############################################################################
228 |
229 | def filter(self, f: Callable) -> PathDict:
230 | """
231 | At the current path only keep the elements for which f(key, value)
232 | is True for dicts, or f(value) is True for lists.
233 | """
234 | get_at_current = self.get()
235 | if isinstance(get_at_current, dict):
236 | return self.set({k: v for k, v in get_at_current.items() if f(k, v)})
237 | if isinstance(get_at_current, list):
238 | return self.set([x for x in get_at_current if f(x)])
239 | raise TypeError("PathDict filter: must be applied to a dict or list")
240 |
241 | def filtered(self, f: Callable) -> PathDict:
242 | """
243 | Shortcut for:
244 | >>> copy().filter(f)
245 | """
246 | return self.copy().filter(f)
247 |
248 | ############################################################################
249 | # Reduce
250 | ############################################################################
251 |
252 | def reduce(self, f: Callable, aggregate=None) -> Any:
253 | """
254 | Reduce a value starting with init at the given path.
255 | If at the selected path is a dict, the function f will be called with
256 | (key, value, aggregate) as arguments.
257 | If at the selected path is a list, the function f will be called with
258 | (value, aggregate) as arguments.
259 | """
260 |
261 | agg = aggregate
262 | get_at_current = self.get()
263 | if isinstance(get_at_current, dict):
264 | for k, v in get_at_current.items():
265 | agg = f(k, v, agg)
266 | return agg
267 | if isinstance(get_at_current, list):
268 | for v in get_at_current:
269 | agg = f(v, agg)
270 | return agg
271 | raise TypeError("PathDict reduce: must be applied to a dict or list")
272 |
273 | ############################################################################
274 | #### Useful Shorthands
275 | ############################################################################
276 |
277 | def sum(self) -> Any:
278 | """
279 | Sum the elements at the given path.
280 | """
281 | get_at_current = self.get()
282 | if isinstance(get_at_current, dict):
283 | return sum(v for k, v in get_at_current.items())
284 | if isinstance(get_at_current, list):
285 | return sum(get_at_current)
286 | raise TypeError("PathDict sum: must be applied to a dict or list")
287 |
288 | def append(self, value) -> PathDict:
289 | """
290 | Append the value to the list at the given path.
291 | """
292 | return self.map(lambda l: (l or []) + [value])
293 |
294 | def update(self, value) -> PathDict:
295 | """
296 | Update the dict at the given path with the given value.
297 | """
298 | return self.map(lambda d: {**d, **value})
299 |
300 | ############################################################################
301 | #### Standard dict methods
302 | ############################################################################
303 |
304 | def keys(self):
305 | return list(self.get().keys())
306 |
307 | def values(self):
308 | return list(self.get().values())
309 |
310 | def items(self):
311 | return self.get().items()
312 |
313 | def pop(self, key, default=None):
314 | return self.get().pop(key, default)
315 |
316 | def __len__(self):
317 | return len(self.get())
318 |
319 | def __getitem__(self, path):
320 | at = self.at(*path) if isinstance(path, tuple) else self.at(path)
321 | if isinstance(at, MultiPathDict):
322 | return at.gather()
323 | res = at.get()
324 | self.at_root()
325 | return res
326 |
327 | def __setitem__(self, path, value):
328 | at = self.at(*path) if isinstance(path, tuple) else self.at(path)
329 | at.map(value) if callable(value) else at.set(value)
330 | self.at_root()
331 |
332 | def __contains__(self, *path):
333 | try:
334 | contains = self.at(*path).get() is not None
335 | self.at_root()
336 | return contains
337 | except KeyError:
338 | return False
339 |
340 | def __iter__(self):
341 | return iter(self.keys())
342 |
343 |
344 | # Import MultiPathDict at the end of the file to avoid circular imports
345 | from .multi_path_dict import MultiPathDict
346 |
--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
1 | # This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand.
2 |
3 | [[package]]
4 | name = "attrs"
5 | version = "22.1.0"
6 | description = "Classes Without Boilerplate"
7 | category = "dev"
8 | optional = false
9 | python-versions = ">=3.5"
10 | files = [
11 | {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
12 | {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
13 | ]
14 |
15 | [package.extras]
16 | dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
17 | docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
18 | tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
19 | tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
20 |
21 | [[package]]
22 | name = "black"
23 | version = "23.1.0"
24 | description = "The uncompromising code formatter."
25 | category = "dev"
26 | optional = false
27 | python-versions = ">=3.7"
28 | files = [
29 | {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"},
30 | {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"},
31 | {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"},
32 | {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"},
33 | {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"},
34 | {file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"},
35 | {file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"},
36 | {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"},
37 | {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"},
38 | {file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"},
39 | {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"},
40 | {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"},
41 | {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"},
42 | {file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"},
43 | {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"},
44 | {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"},
45 | {file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"},
46 | {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"},
47 | {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"},
48 | {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"},
49 | {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"},
50 | {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"},
51 | {file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"},
52 | {file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"},
53 | {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"},
54 | ]
55 |
56 | [package.dependencies]
57 | click = ">=8.0.0"
58 | mypy-extensions = ">=0.4.3"
59 | packaging = ">=22.0"
60 | pathspec = ">=0.9.0"
61 | platformdirs = ">=2"
62 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
63 | typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
64 |
65 | [package.extras]
66 | colorama = ["colorama (>=0.4.3)"]
67 | d = ["aiohttp (>=3.7.4)"]
68 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
69 | uvloop = ["uvloop (>=0.15.2)"]
70 |
71 | [[package]]
72 | name = "click"
73 | version = "8.1.3"
74 | description = "Composable command line interface toolkit"
75 | category = "dev"
76 | optional = false
77 | python-versions = ">=3.7"
78 | files = [
79 | {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
80 | {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
81 | ]
82 |
83 | [package.dependencies]
84 | colorama = {version = "*", markers = "platform_system == \"Windows\""}
85 |
86 | [[package]]
87 | name = "colorama"
88 | version = "0.4.6"
89 | description = "Cross-platform colored terminal text."
90 | category = "dev"
91 | optional = false
92 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
93 | files = [
94 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
95 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
96 | ]
97 |
98 | [[package]]
99 | name = "coverage"
100 | version = "6.5.0"
101 | description = "Code coverage measurement for Python"
102 | category = "dev"
103 | optional = false
104 | python-versions = ">=3.7"
105 | files = [
106 | {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"},
107 | {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"},
108 | {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"},
109 | {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"},
110 | {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"},
111 | {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"},
112 | {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"},
113 | {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"},
114 | {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"},
115 | {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"},
116 | {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"},
117 | {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"},
118 | {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"},
119 | {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"},
120 | {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"},
121 | {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"},
122 | {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"},
123 | {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"},
124 | {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"},
125 | {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"},
126 | {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"},
127 | {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"},
128 | {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"},
129 | {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"},
130 | {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"},
131 | {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"},
132 | {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"},
133 | {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"},
134 | {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"},
135 | {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"},
136 | {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"},
137 | {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"},
138 | {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"},
139 | {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"},
140 | {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"},
141 | {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"},
142 | {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"},
143 | {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"},
144 | {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"},
145 | {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"},
146 | {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"},
147 | {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"},
148 | {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"},
149 | {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"},
150 | {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"},
151 | {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"},
152 | {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"},
153 | {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"},
154 | {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"},
155 | {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"},
156 | ]
157 |
158 | [package.dependencies]
159 | tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
160 |
161 | [package.extras]
162 | toml = ["tomli"]
163 |
164 | [[package]]
165 | name = "exceptiongroup"
166 | version = "1.0.1"
167 | description = "Backport of PEP 654 (exception groups)"
168 | category = "dev"
169 | optional = false
170 | python-versions = ">=3.7"
171 | files = [
172 | {file = "exceptiongroup-1.0.1-py3-none-any.whl", hash = "sha256:4d6c0aa6dd825810941c792f53d7b8d71da26f5e5f84f20f9508e8f2d33b140a"},
173 | {file = "exceptiongroup-1.0.1.tar.gz", hash = "sha256:73866f7f842ede6cb1daa42c4af078e2035e5f7607f0e2c762cc51bb31bbe7b2"},
174 | ]
175 |
176 | [package.extras]
177 | test = ["pytest (>=6)"]
178 |
179 | [[package]]
180 | name = "iniconfig"
181 | version = "1.1.1"
182 | description = "iniconfig: brain-dead simple config-ini parsing"
183 | category = "dev"
184 | optional = false
185 | python-versions = "*"
186 | files = [
187 | {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
188 | {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
189 | ]
190 |
191 | [[package]]
192 | name = "mypy-extensions"
193 | version = "0.4.3"
194 | description = "Experimental type system extensions for programs checked with the mypy typechecker."
195 | category = "dev"
196 | optional = false
197 | python-versions = "*"
198 | files = [
199 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
200 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
201 | ]
202 |
203 | [[package]]
204 | name = "packaging"
205 | version = "23.0"
206 | description = "Core utilities for Python packages"
207 | category = "dev"
208 | optional = false
209 | python-versions = ">=3.7"
210 | files = [
211 | {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"},
212 | {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"},
213 | ]
214 |
215 | [[package]]
216 | name = "pathspec"
217 | version = "0.10.2"
218 | description = "Utility library for gitignore style pattern matching of file paths."
219 | category = "dev"
220 | optional = false
221 | python-versions = ">=3.7"
222 | files = [
223 | {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"},
224 | {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"},
225 | ]
226 |
227 | [[package]]
228 | name = "platformdirs"
229 | version = "2.5.4"
230 | description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
231 | category = "dev"
232 | optional = false
233 | python-versions = ">=3.7"
234 | files = [
235 | {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"},
236 | {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"},
237 | ]
238 |
239 | [package.extras]
240 | docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"]
241 | test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
242 |
243 | [[package]]
244 | name = "pluggy"
245 | version = "1.0.0"
246 | description = "plugin and hook calling mechanisms for python"
247 | category = "dev"
248 | optional = false
249 | python-versions = ">=3.6"
250 | files = [
251 | {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
252 | {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
253 | ]
254 |
255 | [package.extras]
256 | dev = ["pre-commit", "tox"]
257 | testing = ["pytest", "pytest-benchmark"]
258 |
259 | [[package]]
260 | name = "pycodestyle"
261 | version = "2.10.0"
262 | description = "Python style guide checker"
263 | category = "dev"
264 | optional = false
265 | python-versions = ">=3.6"
266 | files = [
267 | {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"},
268 | {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"},
269 | ]
270 |
271 | [[package]]
272 | name = "pyinstrument"
273 | version = "4.4.0"
274 | description = "Call stack profiler for Python. Shows you why your code is slow!"
275 | category = "dev"
276 | optional = false
277 | python-versions = ">=3.7"
278 | files = [
279 | {file = "pyinstrument-4.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8874f8f58cfcb1ff134dc8e4a2b31ab9175adb271a4423596ed7ac8183592cf8"},
280 | {file = "pyinstrument-4.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5f4d6e1c395f259f67a923a9c54dc3eaccd5f02540598da4f865c4bb3545762"},
281 | {file = "pyinstrument-4.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d70fed48ddd0078e287fb580daaeede4d8703a9edc8bf4f703308a77920bac37"},
282 | {file = "pyinstrument-4.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9fda1bd596e81ecd2b6a976eb9b930a757a5dd04071583d0141d059e34eed83f"},
283 | {file = "pyinstrument-4.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f57b61d39d3b1a4d773da16baa8456aa66102d6016ce1f39817051550cbe47e"},
284 | {file = "pyinstrument-4.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5a9aead0ca5579473f66fed4c449c693feee464802b5ba9b98772e64e02c575c"},
285 | {file = "pyinstrument-4.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:375a340c3fbebd922a35b0834de1c82d1b4fea681df49f99729439a6cb5e6ad4"},
286 | {file = "pyinstrument-4.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cbaf3bcda5ad9af4c9a7bf4f1b8f15bb32c4cadf554d0a2c723892c898021b"},
287 | {file = "pyinstrument-4.4.0-cp310-cp310-win32.whl", hash = "sha256:97cbeb5f5a048dc6eb047495f73db90c9e2ec97606e65298c7ea2c61fa52de38"},
288 | {file = "pyinstrument-4.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:73edbce7fda1b3d8cab0b6c39c43b012167d783c072f40928600c3357d1a5dc5"},
289 | {file = "pyinstrument-4.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7db8cb55182883be48520eb915bd1769f176a4813ce0cc38243aa2d1182e7ce7"},
290 | {file = "pyinstrument-4.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7c614e2c241fb558a25973019ff43ce027ba4958bcb87383f0b0789af9c4d03b"},
291 | {file = "pyinstrument-4.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c012422c851f0457c3cb82d8b1259d96fa0dcddc0f1e8bf4d97f0b2efe54485"},
292 | {file = "pyinstrument-4.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4f5ad100710dda68f9f345961780bf4f0cbb9fd3e46295d099bb9ad65b179ea"},
293 | {file = "pyinstrument-4.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a4a053cd67102c6fcc313366ea6be97cfce7eae2b9e57e62c9be8adbbdebc17"},
294 | {file = "pyinstrument-4.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1d2a1e53615c8ef210286e4d2d93be0d3e8296995b090df29a0b7ddeae5d874b"},
295 | {file = "pyinstrument-4.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b2a6609ef74ad8ba292a11fbd975660bc86466c7eaab1ff11360d24e0300800b"},
296 | {file = "pyinstrument-4.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3643084ee8ad22d9ea2adb13d65d4b6e18810113e4176b19d026a011957f8c7c"},
297 | {file = "pyinstrument-4.4.0-cp311-cp311-win32.whl", hash = "sha256:fcd717910a8ab6deca353aded890403bbaea14a6dd99a87c3367f24721d2d6aa"},
298 | {file = "pyinstrument-4.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:be9ac54a4dd07d969d5941e4dcba67d5aef5f6826f43b9ddda65553816f6abca"},
299 | {file = "pyinstrument-4.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:39584c0fec147e3bbfa7b28454332f9801af5f93331f4143f24a4b0f9e3cb470"},
300 | {file = "pyinstrument-4.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5491a5cb3ae5e88436d48b4b3de8328286e843e7307116dc2cca397c9c2ffe21"},
301 | {file = "pyinstrument-4.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d66fcc94f0ebaab6bcbbdfa2482f833dd634352a20295616ea45286e990f7446"},
302 | {file = "pyinstrument-4.4.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b72bde0b1a03d1b2dc9b9d79546f551df6f67673cca816614e98ea0aebd3bc50"},
303 | {file = "pyinstrument-4.4.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0bbd169b92147ec5d67ed160c300dda504059cfd81e953ed5b059e8ef92bb482"},
304 | {file = "pyinstrument-4.4.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e5233022ba511ef7ecfef2e07d162c0817048c995f0940f9aa2f6a1936afcb9c"},
305 | {file = "pyinstrument-4.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5e750fc3afb9acc288ad84b183a5ccd863e9185c435b445fcc62e0c133af9b7f"},
306 | {file = "pyinstrument-4.4.0-cp37-cp37m-win32.whl", hash = "sha256:2d131b98f116fb895d759dfb8c1078e0e9fa8987a9f44f566d29221545f75bd4"},
307 | {file = "pyinstrument-4.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:de83152bafc9eed4e5469e340b6002be825151f0654c32bbb9a3a7e31708d227"},
308 | {file = "pyinstrument-4.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a7c774c4b8df21664b082d3e72fa8cbc0631fe9bb222bb9d285ccfe9cd9b4909"},
309 | {file = "pyinstrument-4.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7526f0b1dab721ddc19920fa1f4eeaa5bcb658a4d18ac9c50868e84f911f794b"},
310 | {file = "pyinstrument-4.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59f5f479db277b3dbeb0c6843a7e9a38ee8b7c23d75b9ef764d96cb522d96212"},
311 | {file = "pyinstrument-4.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffeeaa0d036a8bef31da6fc13c4ea097160f913d86319897314113bb9271af4c"},
312 | {file = "pyinstrument-4.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfc4e2fd670a570ea847f6897283d10d4b9606170e491f01b75488ed1aa37a81"},
313 | {file = "pyinstrument-4.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ffd9a9fa73fd83a40252430c6ebf8dfff7c668cc68eab4a92562b8b27c302598"},
314 | {file = "pyinstrument-4.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:09167ece8802bc03a63e97536dcefd9c1a340dae686f40914cf995099bc0d0af"},
315 | {file = "pyinstrument-4.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9e7d1cc3affef4a7e4695bb87c6cfcd577e2dac508624a91481f24217ef78c57"},
316 | {file = "pyinstrument-4.4.0-cp38-cp38-win32.whl", hash = "sha256:b50cf50513a5318738c3c7147f02596cda4891089acf2f627bb65954fc5bcbfd"},
317 | {file = "pyinstrument-4.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:dd9625cf136eb6684d9ca555a5088f21a7ac6c6cb2ece3ae45d09772906ceba8"},
318 | {file = "pyinstrument-4.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a8afee175335005d2964848b77579bfc18f011ea74b59b79ab6d5b35433bf3e3"},
319 | {file = "pyinstrument-4.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebc63b70845e3a44b673f7dcdc78ac2c475684db41b0402eea370f194da2a287"},
320 | {file = "pyinstrument-4.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb89033e41e74dc2ac4fd882269e91ddf677588efa665d2be8b718e96ea4cec6"},
321 | {file = "pyinstrument-4.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b2bcd803d273c8addf01eaf75a42ae0a2a9196a58fb0ebb8d29be75abb88701"},
322 | {file = "pyinstrument-4.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50727b686a0961a11eba2fe6205d0899f3479c983bcf34abb114d6da70bc1b93"},
323 | {file = "pyinstrument-4.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4f07941bb5dd5cd730fc84eef6497ef9f0807c686e68d0c6b1f464589646a3b7"},
324 | {file = "pyinstrument-4.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:30f5ce299c3219559870117c5b0825f33243808be375be9c3525572ba050c2db"},
325 | {file = "pyinstrument-4.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a13c75b24bf8eed5a4356ffa8a419cc534284a529f2b314f3e10275a820420f"},
326 | {file = "pyinstrument-4.4.0-cp39-cp39-win32.whl", hash = "sha256:e5583b0d23f87631af06bb9f3c184190c889c194b02553eed132de966324bdf9"},
327 | {file = "pyinstrument-4.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:a19784a898133b7e0ffe4489155bacd2d07ec48ea059f9bf50033dc2b814c273"},
328 | {file = "pyinstrument-4.4.0.tar.gz", hash = "sha256:be34a2e8118c14a616a64538e02430d9099d5d67d8a370f2888e4ac71e52bbb7"},
329 | ]
330 |
331 | [package.extras]
332 | jupyter = ["ipython"]
333 |
334 | [[package]]
335 | name = "pytest"
336 | version = "7.2.0"
337 | description = "pytest: simple powerful testing with Python"
338 | category = "dev"
339 | optional = false
340 | python-versions = ">=3.7"
341 | files = [
342 | {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"},
343 | {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"},
344 | ]
345 |
346 | [package.dependencies]
347 | attrs = ">=19.2.0"
348 | colorama = {version = "*", markers = "sys_platform == \"win32\""}
349 | exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
350 | iniconfig = "*"
351 | packaging = "*"
352 | pluggy = ">=0.12,<2.0"
353 | tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
354 |
355 | [package.extras]
356 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
357 |
358 | [[package]]
359 | name = "pytest-cov"
360 | version = "4.0.0"
361 | description = "Pytest plugin for measuring coverage."
362 | category = "dev"
363 | optional = false
364 | python-versions = ">=3.6"
365 | files = [
366 | {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"},
367 | {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"},
368 | ]
369 |
370 | [package.dependencies]
371 | coverage = {version = ">=5.2.1", extras = ["toml"]}
372 | pytest = ">=4.6"
373 |
374 | [package.extras]
375 | testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
376 |
377 | [[package]]
378 | name = "tomli"
379 | version = "2.0.1"
380 | description = "A lil' TOML parser"
381 | category = "dev"
382 | optional = false
383 | python-versions = ">=3.7"
384 | files = [
385 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
386 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
387 | ]
388 |
389 | [[package]]
390 | name = "typing-extensions"
391 | version = "4.4.0"
392 | description = "Backported and Experimental Type Hints for Python 3.7+"
393 | category = "dev"
394 | optional = false
395 | python-versions = ">=3.7"
396 | files = [
397 | {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
398 | {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
399 | ]
400 |
401 | [metadata]
402 | lock-version = "2.0"
403 | python-versions = "^3.8"
404 | content-hash = "f5bf8751c2d50c7c2c5de4473253195895c95145a7d4b2e8a394c43773d9e274"
405 |
--------------------------------------------------------------------------------