├── .github └── workflows │ └── sanity_checks.yaml ├── .gitignore ├── .readthedocs.yaml ├── .vscode ├── launch.json └── settings.json ├── LICENSE.txt ├── MANIFEST.in ├── README.md ├── changelog.md ├── dictgest ├── __init__.py ├── cast.py ├── converter.py ├── routes.py ├── serdes.py └── utils.py ├── docs ├── Makefile ├── make.bat ├── requirements.txt └── source │ ├── README.md │ ├── api.rst │ ├── conf.py │ ├── dictgest_logo.jpg │ ├── ex1.png │ ├── ex2.png │ ├── ex3.png │ ├── index.rst │ ├── table2d.PNG │ └── table2d_transpose.PNG ├── examples ├── custom_conversion_example.py ├── extract_example.py ├── news_example.py ├── news_multi_adv_example.py ├── news_multi_example.py └── typeconvert_example.py ├── pylintrc ├── requirements-dev.txt ├── requirements.txt ├── setup.py └── tests ├── __init__.py ├── conftest.py ├── test_basic.py ├── test_convert.py ├── test_neg.py ├── test_path.py ├── test_route_template.py ├── test_table.py └── utils.py /.github/workflows/sanity_checks.yaml: -------------------------------------------------------------------------------- 1 | # This is a basic workflow to help you get started with Actions 2 | 3 | name: CI 4 | 5 | # Controls when the action will run. 
Triggers the workflow on push or pull request 6 | # events but only for the master branch 7 | on: 8 | push: 9 | branches: [main, development] 10 | pull_request: 11 | branches: [main, development] 12 | 13 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel 14 | jobs: 15 | build: 16 | strategy: 17 | matrix: 18 | python-version: [3.9, 3.10.8] 19 | runs-on: ubuntu-latest 20 | 21 | # Steps represent a sequence of tasks that will be executed as part of the job 22 | steps: 23 | - name: Checkout 24 | uses: actions/checkout@v2 25 | with: 26 | fetch-depth: 0 27 | 28 | - name: Switch to Current Branch 29 | run: git checkout ${{ env.BRANCH }} 30 | 31 | - name: Set up Python ${{ matrix.python-version }} 32 | uses: actions/setup-python@v1 33 | with: 34 | python-version: ${{ matrix.python-version }} 35 | 36 | - name: Install dependencies 37 | run: | 38 | pip install -r requirements-dev.txt 39 | pip install -e . 40 | 41 | - name: run unit tests 42 | run: | 43 | pytest 44 | if: always() 45 | - name: run mypy static types check 46 | run: | 47 | mypy --install-types --non-interactive dictgest/*.py 48 | if: always() 49 | 50 | - name: Pylint 51 | run: | 52 | pylint --fail-under=10 dictgest 53 | if: always() 54 | 55 | - name: Coverage 56 | run: | 57 | pytest --cov=dictgest --cov-report=xml 58 | if: always() 59 | - uses: codecov/codecov-action@v1 60 | with: 61 | file: ./coverage.xml 62 | 63 | - name: Examples 64 | run: | 65 | for f in examples/*.py; do python "$f"; done 66 | if: always() 67 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | docs/build 3 | dictgest.egg-info 4 | dist 5 | docs/source/build 6 | docs/source/_build 7 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | 
# .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the version of Python and other tools you might need 9 | build: 10 | os: ubuntu-20.04 11 | tools: 12 | python: "3.10" 13 | # You can also specify other tool versions: 14 | # nodejs: "16" 15 | # rust: "1.55" 16 | # golang: "1.17" 17 | 18 | # Build documentation in the docs/ directory with Sphinx 19 | sphinx: 20 | configuration: docs/source/conf.py 21 | builder: singlehtml 22 | fail_on_warning: false 23 | 24 | # If using Sphinx, optionally build your docs in additional formats such as PDF 25 | # formats: 26 | # - pdf 27 | 28 | # Optionally declare the Python requirements required to build your docs 29 | python: 30 | install: 31 | - requirements: requirements-dev.txt 32 | - requirements: docs/requirements.txt 33 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "Pytest", 9 | "type": "python", 10 | "request": "launch", 11 | "env": { 12 | "_PYTEST_RAISE": "1" 13 | }, 14 | "module": "pytest", 15 | "justMyCode": false 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.linting.flake8Enabled": true, 3 | "python.linting.enabled": true 4 | } -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2018 Pedro Rodriguez 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 23 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | include requirements.txt -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DictGest - Python Dictionary Ingestion 2 | ![](https://github.com/bmsan/DictGest/blob/main/docs/source/dictgest_logo.jpg) 3 | 4 | [![Reliability Rating](https://sonarcloud.io/api/project_badges/measure?project=bmsan_DictGest&metric=reliability_rating)](https://sonarcloud.io/summary/new_code?id=bmsan_DictGest) 5 | [![Maintainability Rating](https://sonarcloud.io/api/project_badges/measure?project=bmsan_DictGest&metric=sqale_rating)](https://sonarcloud.io/summary/new_code?id=bmsan_DictGest) 6 | [![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=bmsan_DictGest&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=bmsan_DictGest) 7 | [![Code Coverage](https://codecov.io/gh/bmsan/dictgest/branch/main/graph/badge.svg?token=WHTIAW8C85)](https://codecov.io/gh/bmsan/dictgest) 8 | [![CI status](https://github.com/bmsan/dictgest/workflows/CI/badge.svg)](https://github.com/bmsan/dictgest/actions?queryworkflow%3ACI+event%3Apush+branch%3Amain) 9 | [![Docs](https://readthedocs.org/projects/dictgest/badge/?version=latest)](https://readthedocs.org/projects/dictgest) 10 | ![MYPY](https://img.shields.io/badge/mypy-type%20checked-green) 11 | ![Pylint](https://img.shields.io/badge/Pylint-10.00/10-green) 12 | [![Discord](https://img.shields.io/discord/981859018836426752?label=Discord%20chat&style=flat)](https://discord.gg/yBb99rxBUZ) 13 | 14 | 15 | - [DictGest - Python Dictionary Ingestion](#dictgest---python-dictionary-ingestion) 16 | - [Description](#description) 17 | - [Examples](#examples) 18 | - [Example 1: Trivial Example - Handling 
Extra parameters](#example-1-trivial-example---handling-extra-parameters) 19 | - [Example 2: Data mapping renaming & rerouting](#example-2-data-mapping-renaming--rerouting) 20 | - [Example 3: Data type enforcing](#example-3-data-type-enforcing) 21 | - [Example 4: Custom Data extraction/conversion for a specific field](#example-4-custom-data-extractionconversion-for-a-specific-field) 22 | - [Example 5: Custom Data conversion for a specific type](#example-5-custom-data-conversion-for-a-specific-type) 23 | - [Example 6: Populating the same structure from multiple different dict formats (multiple APIs)](#example-6-populating-the-same-structure-from-multiple-different-dict-formats-multiple-apis) 24 | - [Example 8: Populating from a 2D Table](#example-8-populating-from-a-2d-table) 25 | - [Transposing data](#transposing-data) 26 | - [Mapping one table row to target type](#mapping-one-table-row-to-target-type) 27 | - [Installing](#installing) 28 | - [Contributing](#contributing) 29 | - [Support](#support) 30 | - [License](#license) 31 | - [Acknowledgements](#acknowledgements) 32 | 33 | # Description 34 | 35 | When interacting with external REST APIs or with external configuration files we usually do not have control 36 | over the received data structure/format. 
37 | 38 | `DictGest` makes ingesting dictionary data into python objects(dataclasss objects included) easy when the dictionary data doesn't match 1 to 1 with the Python class: 39 | - The dictionary might have extra fields that are of no interest 40 | - The keys names in the dictionary do not match the class attribute names 41 | - The structure of nested dictionaries does not match the class structure 42 | - The data types in the dictionary do not match data types of the target class 43 | 44 | # Examples 45 | 46 | ## Example 1: Trivial Example - Handling Extra parameters 47 | The first most basic and trivial example is ingesting a dictionary that has extra data not of interest 48 | 49 | ```python 50 | from dictgest import from_dict 51 | ``` 52 | 53 | ![](https://github.com/bmsan/DictGest/blob/main/docs/source/ex1.png?raw=true) 54 | 55 | ```python 56 | car = from_dict(Car, dict_data) 57 | ``` 58 | 59 | ## Example 2: Data mapping renaming & rerouting 60 | The keys names in the source dictionary might not match the destionation class attribute names. 61 | Also the source dictionary might have a nested structure different than our desired structure. 62 | 63 | ```python 64 | from typing import Annotated 65 | from dataclasses import dataclass 66 | from dictgest import from_dict, Path 67 | ``` 68 | 69 | 70 | ![](https://github.com/bmsan/DictGest/blob/main/docs/source/ex2.png?raw=true) 71 | 72 | ```python 73 | article = from_dict(Article, news_api_data) 74 | meta = from_dict(ArticleMeta, news_api_data) 75 | stats = from_dict(ArticleStats, news_api_data) 76 | ``` 77 | 78 | The full working example can be found in the [examples folder](https://github.com/bmsan/DictGest/blob/main/examples/news_example.py) 79 | 80 | There can be cases where Annotating the type hints of the target class is not desired by the user or when mapping to multiple APIs might be required. 81 | For these cases look at examples 6 & 7 for an alternate solution. 
82 | 83 | ## Example 3: Data type enforcing 84 | 85 | Sometimes the data coming from external sources might have different datatypes than what we desire. `dictgen` can do type conversion for you. 86 | 87 | 88 | 89 | ```py 90 | from dataclasses import dataclass 91 | from dictgest import from_dict, typecast 92 | 93 | @typecast # Makes the class type convertable when encountered as typing hint 94 | @dataclass # The dataclass is just an example, it could have an normal class 95 | class Measurment: 96 | temp: float 97 | humidity: float 98 | 99 | 100 | class Sensor: 101 | def __init__( 102 | self, name: str, location: str, uptime: float, readings: list[Measurment] 103 | ): 104 | ... 105 | ``` 106 | 107 | ![](https://github.com/bmsan/DictGest/blob/main/docs/source/ex3.png?raw=true) 108 | 109 | The conversions shown above were enabled by setting the `@typecast` decorator for the targetted classes. 110 | 111 | The full working example can be found in the [examples folder](https://github.com/bmsan/DictGest/blob/main/examples/typeconvert_example.py) 112 | 113 | 114 | 115 | ## Example 4: Custom Data extraction/conversion for a specific field 116 | Sometimes we might want to apply custom transforms to some fields when extracting the data from the dictionary. 117 | In this example we want to read the total number of votes, but in the dictionary source we only have two partial values: the positive and negative number of votes. 
118 | 119 | We apply a custom transform to get our desired data, using the `extractor` argument of `dictgest.Path` 120 | 121 | ```py 122 | from typing import Annotated 123 | from dictgest import Path, from_dict 124 | 125 | 126 | def extract_votes(data): 127 | # creating a new value from two individual fields and converting them 128 | return int(data["positive"]) + int(data["negative"]) 129 | 130 | 131 | class Votes: 132 | def __init__( 133 | self, 134 | title, 135 | total_votes: Annotated[int, Path("details/votes", extractor=extract_votes)], 136 | ): 137 | ... 138 | 139 | article_data = { 140 | "title": "Python 4.0 will...", 141 | "details": {"votes": {"positive": "245", "negative": "30"}}, 142 | } 143 | 144 | 145 | votes = from_dict(Votes, article_data) 146 | 147 | ``` 148 | 149 | The full working example can be found in the [examples folder](https://github.com/bmsan/DictGest/blob/main/examples/extract_example.py) 150 | 151 | 152 | 153 | ## Example 5: Custom Data conversion for a specific type 154 | 155 | In some cases we might want to employ a custom conversion for a certain datatype. 
156 | 157 | ```py 158 | from dataclasses import dataclass 159 | from dictgest import default_convertor, from_dict 160 | 161 | # Get any already registered bool convertor 162 | default_bool_conv = default_convertor.get_convertor(bool) 163 | 164 | # create a custom converter 165 | def custom_bool_conv(val): 166 | if val == "oups": 167 | return False 168 | 169 | # Let the other cases be treated as before 170 | return default_bool_conv(val) 171 | 172 | 173 | # register the custom converter for bool 174 | default_convertor.register(bool, custom_bool_conv) 175 | 176 | 177 | @dataclass 178 | class Result: 179 | finished: bool 180 | notified: bool 181 | 182 | 183 | result = from_dict(Result, {"finished": True, "notified": "oups"}) 184 | print(result) 185 | 186 | ``` 187 | 188 | ## Example 6: Populating the same structure from multiple different dict formats (multiple APIs) 189 | 190 | There are cases where you might read information from multiple heterogenous APIs and you might want to convert them all to the same structure. 191 | 192 | Previously we have annotated fields( using typing.Annotation hint ) with Path eg: ` name: Annotated[str, Path('article')] `. This works well for a single conversion mapping. 193 | 194 | For this current scenario we are going to decouple the class from the Routing. 195 | 196 | Previously single mapping scenario: 197 | ```py 198 | @dataclass 199 | class Article: 200 | author: str 201 | title: Annotated[str, Path("headline")] 202 | content: Annotated[str, Path("details/content")] 203 | 204 | ``` 205 | 206 | 207 | But now we have 2 API news sources 208 | 209 | ```py 210 | data_from_api1 = { 211 | "author": "H.O. Ward" 212 | "headline" : "Top 10 Python extensions", 213 | "other_fields" : ..., 214 | "details": { 215 | "content": "Here are the top 10...", 216 | "other_fields": ... 217 | } 218 | } 219 | 220 | data_from_api2 = { 221 | "author": "G.O. 
Gu" 222 | "news_title" : "Vscode gets a new facelift", 223 | "other_fields" : ..., 224 | "full_article": "Yesterday a new version ...", 225 | } 226 | 227 | 228 | } 229 | ``` 230 | 231 | We are going to use `dictgest.Route` to define multiple standalone routes. 232 | 233 | Our previous example becomes: 234 | ```py 235 | @dataclass 236 | class Article: 237 | author: str 238 | title: str # Path annotations are decoupled 239 | content: str 240 | 241 | # Routing equivalent to previous example 242 | article_api1 = Route(title="headline", content="details/content") 243 | 244 | # New Routing for a new dict structure 245 | article_api2 = Route(title="news_title", content="full_article") 246 | 247 | 248 | article1 = from_dict(Article, data_from_api1, routing=article_api1) 249 | article2 = from_dict(Article, data_from_api2, routing=article_api2) 250 | ``` 251 | 252 | 253 | The full working example can be found in the [examples folder](https://github.com/bmsan/DictGest/blob/main/examples/news_multi_example.py) 254 | 255 | 256 | ## Example 8: Populating from a 2D Table 257 | Sometimes when querying databases/external APIs the reponse might be in a form of a 2D Table (a list of lists) 258 | 259 | ```py 260 | header = ["humidity", "temperatures", "timestamps"] 261 | table_data = [ 262 | [0.4, 7.4, "1Dec2022"], 263 | ... 264 | [0.6, 5.4, "21Dec2022"], 265 | ] 266 | ``` 267 | And our desired target structure could look like this: 268 | 269 | ```py 270 | @dataclass 271 | class SenzorData: 272 | timestamps: list[datetime.datetime] 273 | temperatures: list[float] 274 | humidity: list[float] 275 | ``` 276 | 277 | ![](https://github.com/bmsan/DictGest/blob/main/docs/source/table2d.PNG?raw=true) 278 | 279 | In this example we would like each data column to be treated as a field of the target type. 
280 | To ingest our data into our target type we can use `table_to_item` following: 281 | 282 | ```py 283 | import dictgest as dg 284 | 285 | result = dg.table_to_item(SenzorData, table_data, header) 286 | ``` 287 | 288 | ### Transposing data 289 | The operation can be also be performed row wise by using the `transpose = True` flag. 290 | 291 | ![](https://github.com/bmsan/DictGest/blob/main/docs/source/table2d_transpose.PNG?raw=true) 292 | 293 | So given 294 | 295 | ```py 296 | header = ["humidity", "temperatures", "timestamps"] 297 | table_data_transposed = [ 298 | # rows are switched with columns 299 | [0.4, ..., 0.6], 300 | [5.4, ..., 7.4] 301 | ["1Dec2022", ..., "21Dec2022"], 302 | ] 303 | 304 | result = dg.table_to_item(SenzorData, table_data_transposed, header, transpose=True) 305 | ``` 306 | 307 | ### Mapping one table row to target type 308 | We might not want to convert the whole table into a specific data type but map each row/column to a specific datatype. 309 | 310 | ```py 311 | #Unlike SenzorData defined previously SenzorDataPoint holds information only for a single specific time. 312 | @dataclass 313 | class SenzorDataPoint: 314 | timestamp: datetime.datetime 315 | temperature: float 316 | humidity: float 317 | ``` 318 | 319 | For this `table_to_items` can be used 320 | 321 | ``` 322 | result = dg.table_to_items(SenzorDataPoint, table_data, header) 323 | 324 | result = dg.table_to_items(SenzorDataPoint, table_data_transposed, header, transpose=True) 325 | ``` 326 | 327 | 328 | 329 | ## Installing 330 | 331 | ``` 332 | pip install dictgest 333 | ``` 334 | 335 | ## Contributing 336 | 337 | First off, thanks for taking the time to contribute! Contributions are what makes the open-source community such an amazing place to learn, inspire, and create. Any contributions you make will benefit everybody else and are **greatly appreciated**. 
338 | 339 | ## Support 340 | 341 | Reach out to the maintainer at one of the following places: 342 | - [Github issues](https://github.com/bmsan/DictGest/issues) 343 | - [Discord](https://discord.gg/yBb99rxBUZ) 344 | 345 | 346 | ## License 347 | 348 | This project is licensed under the **MIT license**. Feel free to edit and distribute this template as you like. 349 | 350 | See [LICENSE](LICENSE) for more information. 351 | 352 | ## Acknowledgements 353 | 354 | - Thanks [Dan Oneata](https://github.com/danoneata) for the discussions related to usecases and API. 355 | -------------------------------------------------------------------------------- /changelog.md: -------------------------------------------------------------------------------- 1 | # 0.3.0 2 | - Enabled multi-routing support ( ability to work with heterogenous dictionaries from different sources) 3 | - Code coverage increased to 95% 4 | - Added sonarcloud static checking 5 | # 0.2.0 6 | - Enabled the default_converter functionality for strings and dates 7 | - Increased code coverage 8 | - Some code refactoring 9 | - Added more examples to the readme.md 10 | 11 | # 0.1.0 12 | Initial version 13 | -------------------------------------------------------------------------------- /dictgest/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Package for ingesting dictionary data into python classes. 
3 | """ 4 | 5 | __all__ = [ 6 | "from_dict", 7 | "table_to_item", 8 | "table_to_items", 9 | "typecast", 10 | "Path", 11 | "default_convertor", 12 | "Route", 13 | "Chart", 14 | ] 15 | from .serdes import from_dict, typecast, table_to_item, table_to_items 16 | from .routes import Path, Route, Chart 17 | from .converter import default_convertor 18 | -------------------------------------------------------------------------------- /dictgest/cast.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import inspect 3 | from typing import ( # type: ignore 4 | Any, 5 | Callable, 6 | Iterable, 7 | Mapping, 8 | MutableMapping, 9 | Optional, 10 | Protocol, 11 | TypeVar, 12 | cast, 13 | get_args, 14 | get_origin, 15 | runtime_checkable, 16 | types, 17 | ) 18 | 19 | from dictgest.routes import Chart 20 | 21 | T = TypeVar("T") 22 | M = TypeVar("M", bound=Mapping) 23 | 24 | TypeConverterMap = Mapping[type[T], Callable[[Any], T]] 25 | RouteMap = Mapping[type, Any] 26 | TypeConvertor = Callable[[Any], T] 27 | 28 | 29 | @runtime_checkable 30 | class TypeCastable(Protocol): 31 | """Runtime checkable protocol for classes that need type conversion. 32 | Classes can be decorated as TypeCastable using @typecast decorator 33 | """ 34 | 35 | @staticmethod 36 | def __typecast__( 37 | data: dict[str, Any], 38 | mapping: Optional[TypeConverterMap], 39 | routes: Optional[Chart], 40 | ): 41 | ... 
42 | 43 | 44 | def convert_mapping( 45 | data: Mapping, 46 | dtype: type[T], 47 | mappings: TypeConverterMap[T] = None, 48 | routing: Chart = None, # pylint: disable=W0613 49 | ) -> T: 50 | """Convert data to sepcified Mapping Annotated type 51 | 52 | Parameters 53 | ---------- 54 | data 55 | Source data to be converted 56 | dtype 57 | Desired Mapping type of the result 58 | mappings, optional 59 | Converters for mapping types, by default None 60 | 61 | Returns 62 | ------- 63 | data converted to dtype 64 | 65 | Raises 66 | ------ 67 | ValueError 68 | _description_ 69 | """ 70 | origin = get_origin(dtype) 71 | args = get_args(dtype) 72 | assert isinstance(origin, type) 73 | 74 | if not issubclass(origin, MutableMapping): 75 | raise ValueError() 76 | 77 | assert len(args) == 2 78 | assert isinstance(data, Mapping) 79 | # Here we should actually try to create the mapping type 80 | # 1. Try to create the Mapping datatype 81 | # 2. If above fails try to copy if the original data is the desired datatype 82 | # 3. 
Fallback to dictionary 83 | # res: Mapping = {} 84 | try: 85 | res = origin() 86 | except (TypeError, ValueError): 87 | res = copy.copy(data) if isinstance(data, origin) else {} 88 | key_type, val_type = args 89 | for key, val in data.items(): 90 | key = convert(key, key_type, mappings) 91 | val = convert(val, val_type, mappings) 92 | res[key] = val 93 | return cast(T, res) 94 | 95 | 96 | def convert_iterable( 97 | data, 98 | dtype: type[T], 99 | mappings: TypeConverterMap[T] = None, 100 | routing: Chart = None, # pylint: disable=W0613 101 | ) -> T: 102 | """Convert data according to the annotated Iterable datatype 103 | 104 | Parameters 105 | ---------- 106 | data 107 | Source data to be converted 108 | dtype 109 | Desired result iterable data type 110 | mappings, optional 111 | Predefined conversions, by default None 112 | 113 | Returns 114 | ------- 115 | Converted data 116 | """ 117 | origin = get_origin(dtype) 118 | args = get_args(dtype) 119 | assert isinstance(origin, type) 120 | if not issubclass(origin, Iterable): 121 | raise ValueError() 122 | 123 | elements: list[Any] = [] 124 | if len(args) == 1: 125 | elements.extend(convert(el, args[0], mappings) for el in data) 126 | else: 127 | assert len(args) == len(data) 128 | elements.extend(convert(el, dt_val, mappings) for dt_val, el in zip(args, data)) 129 | 130 | return origin(elements) # type: ignore 131 | 132 | 133 | def convert_base_type( 134 | data: Any, 135 | dtype: type[T], 136 | type_mappings: TypeConverterMap[T] = None, 137 | routing: Chart = None, 138 | ) -> T: 139 | """ 140 | Datatype conversion function when dtype isn't a generic alias 141 | See `convert` for details 142 | """ 143 | if type_mappings and dtype in type_mappings: 144 | return type_mappings[dtype](data) 145 | 146 | # base type 147 | if issubclass(dtype, TypeCastable): 148 | # Type has been decorated with the @typecast decorator 149 | return dtype.__typecast__(data, type_mappings, routing) 150 | if routing and dtype in routing: 151 | 
if routing.typecast: 152 | return routing.typecast(dtype, data, type_mappings, routing) 153 | raise ValueError("routing.typecast was not set") 154 | # try default conversion 155 | return dtype(data) # type: ignore 156 | 157 | 158 | def convert_generic_alias( 159 | data: Any, 160 | dtype: type[T], 161 | type_mappings: TypeConverterMap[T] = None, 162 | routing: Chart = None, 163 | ) -> T: 164 | """ 165 | Datatype conversion function for dtype of `types.GenericAlias`. 166 | See `convert` for details 167 | """ 168 | if type_mappings and dtype in type_mappings: 169 | return type_mappings[dtype](data) 170 | 171 | origin = get_origin(dtype) 172 | assert isinstance(origin, type) 173 | if issubclass(origin, Mapping): 174 | if not isinstance(data, Mapping): 175 | raise TypeError(f"Cannot convert from {type(data)} to : {dtype}") 176 | return convert_mapping(data, dtype, type_mappings, routing) 177 | if not issubclass(origin, Iterable): 178 | raise ValueError(f"{origin}") 179 | if not isinstance(data, Iterable): 180 | raise TypeError(f"Cannot convert from {type(data)} to : {dtype}") 181 | return convert_iterable(data, dtype, type_mappings, routing) 182 | 183 | 184 | def convert( 185 | data: Any, 186 | dtype: Optional[type[T]], 187 | type_mappings: TypeConverterMap[T] = None, 188 | routing: Chart = None, 189 | ) -> T: 190 | """Converts a data value to a specified data type. 
191 | 192 | Parameters 193 | ---------- 194 | data 195 | Data to be converted 196 | dtype 197 | Type to convert 198 | type_mappings, optional 199 | predefined convertor map for certain data types 200 | 201 | Returns 202 | ------- 203 | The converted datatype 204 | """ 205 | empty = inspect.Parameter.empty 206 | if dtype is None or dtype is empty: 207 | return data # no datatype was specified 208 | if type(dtype) == type: # pylint: disable=C0123 209 | if isinstance(data, dtype): 210 | return data # already the right type 211 | return convert_base_type(data, dtype, type_mappings, routing) 212 | if type(dtype) == types.GenericAlias: # pylint: disable=C0123 213 | return convert_generic_alias(data, dtype, type_mappings, routing) 214 | raise ValueError(f"{type(dtype)}, {dtype}") 215 | -------------------------------------------------------------------------------- /dictgest/converter.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from typing import Mapping, TypeVar 3 | from dateutil import parser as date_parser 4 | from dictgest.cast import TypeConvertor 5 | 6 | 7 | T = TypeVar("T") 8 | 9 | 10 | class Convertor(Mapping): 11 | """ 12 | Used to convert data to certain datatypes. 13 | Conversion mappings need to be registered using the `register` method 14 | """ 15 | 16 | def __init__(self): 17 | self.mappings: dict[type, TypeConvertor] = {} 18 | 19 | def register(self, dtype: type[T], converter: TypeConvertor[T]): 20 | """Registers a convertor for a data type 21 | 22 | Parameters 23 | ---------- 24 | dtype 25 | Data type for which to use convertor 26 | converter 27 | Callable capable of converting data to dtype 28 | """ 29 | 30 | self.mappings[dtype] = converter 31 | 32 | def __getitem__(self, key): 33 | return self.mappings[key] 34 | 35 | def get_converter(self, key): 36 | """Return registered conversion for key type. 
37 | If no conversion is registred, return the last resort convertor (the type constructor) 38 | """ 39 | return self[key] if key in self else key 40 | 41 | def __contains__(self, key): 42 | return key in self.mappings 43 | 44 | def __len__(self): 45 | return len(self.mappings) 46 | 47 | def __iter__(self): 48 | yield from self.mappings 49 | 50 | 51 | def bool_converter(val) -> bool: 52 | """Convert to bool""" 53 | if isinstance(val, bool): 54 | return val 55 | if val == 1: 56 | return True 57 | if val == 0: 58 | return False 59 | if isinstance(val, str): 60 | lower = val.lower().strip() 61 | if lower in ["true", "yes", "ok"]: 62 | return True 63 | if lower in ["false", "no"]: 64 | return False 65 | raise ValueError(f"Unable to convert [{val}] to bool") 66 | 67 | 68 | def date_convertor(val) -> datetime: 69 | """Convert value to datetime. 70 | If the input is numeric it will be treated as unixtime. 71 | If the input is a string the format will be autodeduced 72 | """ 73 | if isinstance(val, datetime): 74 | return val 75 | try: 76 | val = float(val) 77 | return datetime.utcfromtimestamp(val) 78 | except ValueError: 79 | pass 80 | 81 | return date_parser.parse(val) 82 | 83 | 84 | default_convertor = Convertor() 85 | 86 | default_convertor.register(datetime, date_convertor) 87 | default_convertor.register(bool, bool_converter) 88 | -------------------------------------------------------------------------------- /dictgest/routes.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | from typing import Any, Callable, Iterable, Mapping, Optional, Union 3 | 4 | from dictgest.utils import flatten 5 | 6 | 7 | class Path: 8 | """Data type annotation for class attributes that can signal: 9 | - renaming: maping a dictionary field to an attribute with a different name 10 | - rerouting: mapping a nested dictionary field to a class attribute 11 | - Setting a default data converter for the field 12 | 13 | Its is used in 
conjunction with Pythons ``Typing.Annotated`` functionality 14 | 15 | .. code-block:: python 16 | 17 | class Model: 18 | def __init__(self, 19 | // the module will extract the 'field1' key 20 | field1, 21 | // the module will extract the 'name' key 22 | field2 : Annotated[str, Path('name')] 23 | // the module will extract the ['p1']['p2']['val'] field 24 | field3 : Annotated[str, Path('p1/p2/val')] 25 | ) 26 | 27 | """ 28 | 29 | def __init__(self, path: str, extractor: Callable = None, flatten_en=True) -> None: 30 | """ 31 | 32 | Parameters 33 | ---------- 34 | path 35 | Extraction path(key/keys) from dictionary. 36 | Eg: path='name1' will map the annotated field to a dictionary key 'name1' 37 | Eg: path='p1/p2/name2' will map the annotated field to nested_data['p1']['p2']['name2'] 38 | extractor, optional 39 | Callable to extract/convert the data from the specified path, by default None 40 | flatten_en, optional 41 | In case the path contains an element which is a list, flatten it's elements 42 | 43 | Eg: data = {'a': [ 44 | [{'b': 1}, {'b': 2}], 45 | [{'b': 3}] 46 | ]} 47 | path='a/b' with flatten_en would result in the extraction of [1, 2, 3] 48 | """ 49 | 50 | self.path = path 51 | self.parts = path.split("/") 52 | self.extractor = extractor 53 | self.flatten_en = flatten_en 54 | 55 | @staticmethod 56 | def _wildcard_extract(data: Iterable, part: str): 57 | if not isinstance(data, (list, tuple)): 58 | raise TypeError() 59 | if "{" in part: 60 | name, val = part.split("{", 1)[1].split("}", 1)[0].split("=") 61 | data = [el for el in data if name in el and str(el[name]) == val] 62 | return data 63 | 64 | def _iterable_extract(self, data: Iterable, part: str) -> list: 65 | data = [o[part] for o in data if part in o] 66 | if self.flatten_en: 67 | data = flatten(data) 68 | return data 69 | 70 | def extract(self, data: dict[str, Any]): 71 | """Extract element from dictionary data from the configured path. 
72 | 73 | Parameters 74 | ---------- 75 | data 76 | Dictionary from which to extract the targeted value 77 | 78 | Returns 79 | ------- 80 | Extracted value 81 | 82 | """ 83 | for part in self.parts: 84 | 85 | if part.startswith("*"): 86 | data = Path._wildcard_extract(data, part) 87 | elif isinstance(data, (list, tuple)): 88 | data = self._iterable_extract(data, part) 89 | 90 | elif part != "": 91 | data = data[part] 92 | if self.extractor is not None: 93 | data = self.extractor(data) 94 | return data 95 | 96 | def get(self, data: dict, default): 97 | """`extract` with default value in case of failure""" 98 | try: 99 | return self.extract(data) 100 | except KeyError: 101 | return default 102 | 103 | 104 | class Route: 105 | """A Template/Chart describing the routing between a class and dictionary 106 | 107 | Initialized with keyword arguments containing the mapping. 108 | - The keys correspond to the destination field names. 109 | - The values correspond to the extraction path. They can be of type `value` or type `Path` 110 | 111 | Example 112 | -------- 113 | Route( title="headline", 114 | category="description/category", 115 | content=Path("description/content"), 116 | votes=Path("meta/traffic", extractor=votes_extreactor) 117 | ) 118 | """ 119 | 120 | def __init__(self, **kwargs: dict[str, Union[Path, str]]) -> None: 121 | """kwargs: 122 | - keys : destination mapping names 123 | - values: dictionary path. 
        """
        if not kwargs:
            raise ValueError("Did not pass any parameters to route")
        self.mapping = kwargs
        # Normalise: plain strings become Path objects so the rest of the code
        # can rely on a single type.
        for key, val in kwargs.items():
            if isinstance(val, str):
                self.mapping[key] = Path(val)
            elif not isinstance(val, Path):
                raise TypeError(
                    f"Encountered field of type: {type(val)}, expecting Path or str"
                )

    def __getitem__(self, key):
        # Missing keys yield None rather than KeyError (relied upon by
        # serdes when resolving a field's route).
        return self.mapping[key] if key in self.mapping else None

    def check_type(self, dtype: type):
        """
        Check if the dtype is compatible with the Route
        """
        params = inspect.signature(dtype).parameters
        self.check_params(params)

    def check_params(self, params: Iterable[str]):
        """Check if the parameter names are compatible with the Route"""
        params = set(params)
        for key in self.mapping:
            if key not in params:
                raise ValueError(
                    f"Route containing field {key}, but not present in target class "
                )


class Chart:
    """A chart is a collection of routes mapped to classes.
    A chart describes the way a dictionary ingestion should happen,
    when multiple different classes will be converted.
    """

    def __init__(self, routes: Mapping[type, Route]):
        if not isinstance(routes, Mapping):
            raise TypeError(f"Expected a Mapping type, received {type(routes)}")
        self.routes = routes
        # Optional hook assigned externally (see serdes._construct_routing) to
        # let nested conversions re-enter the typecasting machinery.
        self.typecast: Optional[Callable] = None
        self.check()

    def check(self):
        """Check the validity of the chart.
        A chart can be invalid if the configured routes cannot be mapped to targeted objects.
        Eg: one of the routes contains a field that is not present in the data type
        """
        for dtype, route in self.routes.items():
            route.check_type(dtype)

    def __contains__(self, key):
        return key in self.routes

    def __getitem__(self, key):
        return self.routes[key]
--------------------------------------------------------------------------------
/dictgest/serdes.py:
--------------------------------------------------------------------------------
import inspect
from typing import (  # type: ignore
    Iterable,
    Optional,
    TypeVar,
    Union,
    types,
    _AnnotatedAlias,
)  # type: ignore
from functools import partial

from dictgest.routes import Chart, Path, Route

from .cast import TypeConverterMap, convert
from .converter import default_convertor

T = TypeVar("T", bound=type)


def typecast(cls):
    """
    Decorates a python class(including dataclass)
    to enable automatic type conversion.
    Can be used as a class decorator

    Examples
    --------
    It can be used as a class decorator

    >>> @typecast
    >>> class MyClass:
    >>>     ...

    But also as a function call

    >>> typecast(MyClass)

    Returns
    -------
    The decorated class
    """
    # Attach a bound ingestion helper so that
    # MyClass.__typecast__(data) == from_dict(MyClass, data).
    cls.__typecast__ = partial(from_dict, cls)
    return cls


def _get_dtype_from_anot(anot) -> Optional[type]:
    """Return the concrete type behind an annotation, or None if there is none.

    Unwraps ``Annotated[T, ...]`` to ``T``; plain types and generic aliases
    are returned as-is.
    """
    dtype: Optional[type] = None
    if type(anot) in [type, types.GenericAlias]:
        dtype = anot
    elif type(anot) == _AnnotatedAlias:
        # Annotated[T, meta...] -> T
        dtype = anot.__origin__
    return dtype


def _get_path_from_anot(anot):
    """Return the first ``Path`` found in an Annotated's metadata, or None."""
    _path = None
    if hasattr(anot, "__metadata__"):
        for meta in anot.__metadata__:
            if isinstance(meta, Path):
                _path = meta
                break
    return _path


def _get_route_path(anot, name: str, route_template: Optional[Route]):
    """Resolve the extraction Path for field `name`.

    The path may come from the Route template or from the field's annotation;
    defining it in both places is ambiguous and raises ValueError.
    """
    anot_path = _get_path_from_anot(anot)
    template_path = route_template[name] if route_template else None

    if anot_path and template_path:
        raise ValueError(
            f"For field {name}, the path was found in both the template and destination path"
        )

    return template_path or anot_path


def _construct_routing(
    dtype: type, routing: Union[Route, dict[type, Route], Chart, None]
) -> Optional[Chart]:
    """Normalise any accepted `routing` argument into a Chart (or None)."""
    chart = None
    if routing:
        if isinstance(routing, Chart):
            chart = routing
        elif isinstance(routing, Route):
            # A single Route applies to the top-level target type only.
            chart = Chart({dtype: routing})
        else:
            chart = Chart(routing)
        # Let nested conversions re-enter from_dict via the chart.
        chart.typecast = from_dict
    return chart


def from_dict(
    target: type[T],
    data: dict,
    type_mappings: TypeConverterMap = default_convertor,
    routing: Optional[Union[Route, dict[type, Route], Chart]] = None,
    convert_types: bool = True,
) -> T:
    """Converts a dictionary to the desired target type.
100 | 101 | Parameters 102 | ---------- 103 | target 104 | Target conversion type 105 | data 106 | dictionary data to be converted to target type 107 | type_mappings, optional 108 | custom conversion mapping for datatypess, by default None 109 | routing, optional 110 | custom conversion routing for fieldnames, see `Route` 111 | convert_types, optional 112 | if target fields should be converted to typing hint types. 113 | 114 | Returns 115 | ------- 116 | The converted datatype 117 | 118 | """ 119 | empty = inspect.Parameter.empty 120 | params = inspect.signature(target).parameters 121 | 122 | routing = _construct_routing(target, routing) 123 | router = routing[target] if routing and target in routing else None 124 | 125 | kwargs = {} 126 | for name, prop in params.items(): 127 | anot = prop.annotation 128 | 129 | dtype = _get_dtype_from_anot(anot) 130 | _path = _get_route_path(anot, name, router) 131 | val = _path.get(data, prop.default) if _path else data.get(name, prop.default) 132 | if val == empty: 133 | raise ValueError(f"Missing parameter {name}") 134 | if convert_types: 135 | val = convert(val, dtype, type_mappings, routing) 136 | kwargs[name] = val 137 | 138 | return target(**kwargs) # type: ignore 139 | 140 | 141 | def _get_row(data: list[list], transpose: bool): 142 | if transpose: 143 | for idx in range(len(data[0])): 144 | yield [row[idx] for row in data] 145 | else: 146 | yield from data 147 | 148 | 149 | def table_to_item( 150 | target: type[T], 151 | data: list[list], 152 | header: list[str], 153 | transpose: bool = False, 154 | type_mappings: TypeConverterMap = default_convertor, 155 | routing: Union[Route, dict[type, Route], Chart] = None, 156 | convert_types: bool = True, 157 | # pylint: disable=R0913 158 | ) -> T: 159 | """Converts a table (2d structure) to the desired target type. 160 | The table columns are regarded as target fields and the 161 | field names are given in the header parameter. 
    Parameters
    ----------
    target
        Target conversion type
    data
        2d table (nested lists) that will be converted
    header
        column names of the 2d table
    transpose
        switch rows with columns (eg: first row becomes first column and vice versa)
    type_mappings, optional
        custom conversion mapping for datatypes, by default None
    routing, optional
        custom conversion routing for fieldnames, see `Route`
    convert_types, optional
        if target fields should be converted to typing hint types.

    Returns
    -------
    The converted datatype

    """

    if transpose:
        # Transposed layout: each outer list is already one column/field.
        if len(data) != len(header):
            # NOTE(review): the message reads "...while table N"; it likely
            # meant "...while table has N rows" -- confirm before rewording.
            raise ValueError(
                f"Header has {len(header)} elements while table {len(data)}"
            )
        dict_of_lists = {key: item for item, key in zip(data, header)}
    else:
        # Row-major layout: gather each column across all rows.
        if len(data[0]) != len(header):
            raise ValueError(
                f"Header has {len(header)} elements while table {len(data[0])}"
            )
        dict_of_lists = {
            key: [row[col_idx] for row in data] for col_idx, key in enumerate(header)
        }

    return from_dict(
        target,
        dict_of_lists,
        type_mappings=type_mappings,
        routing=routing,
        convert_types=convert_types,
    )


def table_to_items(
    target: type[T],
    data: list[list],
    header: list[str],
    transpose: bool = False,
    type_mappings: TypeConverterMap = default_convertor,
    routing: Optional[Union[Route, dict[type, Route], Chart]] = None,
    convert_types: bool = True,
    # pylint: disable=R0913
) -> Iterable[T]:
    """Converts a table (2d structure) to a list of items of the desired target type.
    Each table row is regarded as an item to be converted.
    The field names are given in the header parameter.
222 | 223 | Parameters 224 | ---------- 225 | target 226 | Target conversion type 227 | data 228 | 2d table (nested lists) that will be converted 229 | header 230 | column names of the 2d table 231 | transpose 232 | switch rows with columns(eg: first row becomes first column and viceversa) 233 | type_mappings, optional 234 | custom conversion mapping for datatypess, by default None 235 | routing, optional 236 | custom conversion routing for fieldnames, see `Route` 237 | convert_types, optional 238 | if target fields should be converted to typing hint types. 239 | 240 | Returns 241 | ------- 242 | The converted datatype 243 | 244 | """ 245 | for row_idx, row in enumerate(_get_row(data, transpose)): 246 | if len(row) != len(header): 247 | raise ValueError( 248 | f"Header has {len(header)} elements while table row[{row_idx}] has {len(data)}" 249 | ) 250 | dict_to_convert = {key: item for item, key in zip(row, header)} 251 | 252 | yield from_dict( 253 | target, 254 | dict_to_convert, 255 | type_mappings=type_mappings, 256 | routing=routing, 257 | convert_types=convert_types, 258 | ) 259 | -------------------------------------------------------------------------------- /dictgest/utils.py: -------------------------------------------------------------------------------- 1 | def flatten(data: list) -> list: 2 | """Flatten a nested list 3 | Eg: [[a, b, c], [d, e]] => [a, b, c, d, e] 4 | 5 | """ 6 | if data and isinstance(data[0], (list, tuple)): 7 | out = [] 8 | for elem in data: 9 | out += elem 10 | return out 11 | return data 12 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 
6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 
21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | Sphinx==4.5.0 2 | myst-parser==0.17.2 3 | sphinx_markdown_builder==0.5.5 4 | sphinxcontrib-applehelp==1.0.2 5 | sphinxcontrib-devhelp==1.0.2 6 | sphinxcontrib-htmlhelp==2.0.0 7 | sphinxcontrib-jsmath==1.0.1 8 | sphinxcontrib-qthelp==1.0.3 9 | sphinxcontrib-serializinghtml==1.1.5 -------------------------------------------------------------------------------- /docs/source/README.md: -------------------------------------------------------------------------------- 1 | ../../README.md -------------------------------------------------------------------------------- /docs/source/api.rst: -------------------------------------------------------------------------------- 1 | Documentation 2 | ================= 3 | 4 | API 5 | ----------- 6 | 7 | .. automodule:: dictgest 8 | :members: 9 | :show-inheritance: 10 | 11 | Internal Documentation 12 | ----------------------- 13 | 14 | .. automodule:: dictgest.cast 15 | :members: 16 | :show-inheritance: 17 | 18 | .. automodule:: dictgest.serdes 19 | :members: 20 | :show-inheritance: 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. 
For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | # 13 | import os 14 | import sys 15 | 16 | print(os.getcwd()) 17 | sys.path.insert(0, os.path.abspath("../..")) 18 | 19 | 20 | # -- Project information ----------------------------------------------------- 21 | 22 | project = "DictGest" 23 | copyright = "2022, bmasn" 24 | author = "bmasn" 25 | 26 | 27 | # -- General configuration --------------------------------------------------- 28 | 29 | # Add any Sphinx extension module names here, as strings. They can be 30 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 31 | # ones. 32 | extensions = [ 33 | "sphinx.ext.napoleon", 34 | "sphinx.ext.autodoc", 35 | "sphinx.ext.coverage", 36 | "sphinx.ext.viewcode", 37 | "myst_parser", 38 | "sphinx_markdown_builder", 39 | ] 40 | 41 | napoleon_preprocess_types = True 42 | # Add any paths that contain templates here, relative to this directory. 43 | templates_path = ["_templates"] 44 | 45 | # List of patterns, relative to source directory, that match files and 46 | # directories to ignore when looking for source files. 47 | # This pattern also affects html_static_path and html_extra_path. 48 | exclude_patterns = ["build/*"] 49 | 50 | 51 | # -- Options for HTML output ------------------------------------------------- 52 | 53 | # The theme to use for HTML and HTML Help pages. See the documentation for 54 | # a list of builtin themes. 55 | # 56 | html_theme = "alabaster" 57 | 58 | # Add any paths that contain custom static files (such as style sheets) here, 59 | # relative to this directory. 
They are copied after the builtin static files, 60 | # so a file named "default.css" will overwrite the builtin "default.css". 61 | html_static_path = ["_static"] 62 | -------------------------------------------------------------------------------- /docs/source/dictgest_logo.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bmsan/DictGest/5e07acce54cc601a35263ab052280bd2f6c0bf0a/docs/source/dictgest_logo.jpg -------------------------------------------------------------------------------- /docs/source/ex1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bmsan/DictGest/5e07acce54cc601a35263ab052280bd2f6c0bf0a/docs/source/ex1.png -------------------------------------------------------------------------------- /docs/source/ex2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bmsan/DictGest/5e07acce54cc601a35263ab052280bd2f6c0bf0a/docs/source/ex2.png -------------------------------------------------------------------------------- /docs/source/ex3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bmsan/DictGest/5e07acce54cc601a35263ab052280bd2f6c0bf0a/docs/source/ex3.png -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. DictGest documentation master file, created by 2 | sphinx-quickstart on Tue May 31 16:35:33 2022. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to DictGest's documentation! 7 | ==================================== 8 | 9 | .. 
toctree:: 10 | :maxdepth: 2 11 | :caption: Contents: 12 | 13 | README.md 14 | api 15 | 16 | Indices and tables 17 | ================== 18 | 19 | * :ref:`genindex` 20 | * :ref:`modindex` 21 | * :ref:`search` 22 | -------------------------------------------------------------------------------- /docs/source/table2d.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bmsan/DictGest/5e07acce54cc601a35263ab052280bd2f6c0bf0a/docs/source/table2d.PNG -------------------------------------------------------------------------------- /docs/source/table2d_transpose.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bmsan/DictGest/5e07acce54cc601a35263ab052280bd2f6c0bf0a/docs/source/table2d_transpose.PNG -------------------------------------------------------------------------------- /examples/custom_conversion_example.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from dictgest import default_convertor, from_dict 3 | from dataclasses import dataclass 4 | 5 | # Get any already registered bool convertor 6 | default_bool_conv = default_convertor.get_converter(bool) 7 | 8 | # create a custom converter 9 | def custom_bool_conv(val): 10 | if val == "oups": 11 | return False 12 | 13 | # Let the other cases be treated as before 14 | return default_bool_conv(val) 15 | 16 | 17 | # register the custom converter for bool 18 | default_convertor.register(bool, custom_bool_conv) 19 | 20 | 21 | @dataclass 22 | class Result: 23 | finished: bool 24 | notified: bool 25 | 26 | 27 | result = from_dict(Result, {"finished": True, "notified": "oups"}) 28 | print(result) 29 | -------------------------------------------------------------------------------- /examples/extract_example.py: -------------------------------------------------------------------------------- 1 | from typing import 
Annotated 2 | from dictgest import Path, from_dict 3 | 4 | 5 | def extract_votes(data): 6 | # creating a new value from two individual fields and converting them 7 | return int(data["positive"]) + int(data["negative"]) 8 | 9 | 10 | class Votes: 11 | def __init__( 12 | self, 13 | title, 14 | total_votes: Annotated[int, Path("details/votes", extractor=extract_votes)], 15 | ): 16 | self.title = title 17 | self.total_votes = total_votes 18 | 19 | def __repr__(self): 20 | return str(self.__dict__) 21 | 22 | 23 | article_data = { 24 | "title": "Python 4.0 will...", 25 | "details": {"votes": {"positive": "245", "negative": "30"}}, 26 | } 27 | 28 | 29 | votes = from_dict(Votes, article_data) 30 | print(votes) 31 | -------------------------------------------------------------------------------- /examples/news_example.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Annotated 3 | from dictgest import from_dict, Path 4 | 5 | 6 | news_api_data = { 7 | "author": "H.O. Ward", 8 | "headline": "Will statically typed python become a thing?", 9 | "details": { 10 | "content": "Over the past 10 years ...[+]", 11 | "description": "Statically typing is getting ...[+]", 12 | "views": 32, 13 | "comments": 2, 14 | }, 15 | "seo": {"tags": ["python", "programming"], "kwrds": ["guido", "python"]}, 16 | } 17 | 18 | 19 | @dataclass 20 | class Article: 21 | author: str 22 | title: Annotated[str, Path("headline")] 23 | content: Annotated[str, Path("details/content")] 24 | 25 | 26 | class ArticleMeta: 27 | def __init__( 28 | self, 29 | description: Annotated[str, Path("details/description")], 30 | tags: Annotated[str, Path("seo/tags")], 31 | keywords: Annotated[str, Path("seo/kwrds")], 32 | ): 33 | ... 
34 | 35 | 36 | @dataclass 37 | class ArticleStats: 38 | views: Annotated[int, Path("details/views")] 39 | num_comments: Annotated[int, Path("details/comments")] 40 | 41 | 42 | article = from_dict(Article, news_api_data) 43 | meta = from_dict(ArticleMeta, news_api_data) 44 | stats = from_dict(ArticleStats, news_api_data) 45 | print(article) 46 | print(meta) 47 | print(stats) 48 | -------------------------------------------------------------------------------- /examples/news_multi_adv_example.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Annotated 3 | from dictgest import from_dict, Path, Route 4 | 5 | 6 | news_api1_data = { 7 | "author": "H.O. Ward", 8 | "headline": "Will statically typed python become a thing?", 9 | "details": { 10 | "content": "Over the past 10 years ...[+]", 11 | "description": "Statically typing is getting ...[+]", 12 | "views": 32, 13 | "comments": 2, 14 | }, 15 | "seo": {"tags": ["python", "programming"], "kwrds": ["guido", "python"]}, 16 | } 17 | 18 | news_api2_data = { 19 | "author": "H. 
Gogu", 20 | "news_title": "Best python extensions", 21 | "full_article": "Let's explore the best extensions for python", 22 | "views": 32, 23 | "comments": 2, 24 | } 25 | 26 | 27 | @dataclass 28 | class ArticleStats: 29 | views: int 30 | num_comments: int 31 | 32 | 33 | @dataclass 34 | class Article: 35 | author: str 36 | title: str 37 | content: str 38 | stats: ArticleStats 39 | 40 | 41 | api1_routing = { 42 | Article: Route( 43 | title="headline", 44 | content="details/content", 45 | stats="", # Give the whole dictionary to ArticleStats for conversion 46 | ), 47 | ArticleStats: Route(views="details/views", num_comments="details/comments"), 48 | } 49 | 50 | api2_routing = { 51 | Article: Route(title="news_title", content="full_article", stats=""), 52 | ArticleStats: Route(num_comments="comments"), 53 | } 54 | 55 | 56 | article1 = from_dict(Article, news_api1_data, routing=api1_routing) 57 | article2 = from_dict(Article, news_api2_data, routing=api2_routing) 58 | 59 | print(article1) 60 | print(article2) 61 | -------------------------------------------------------------------------------- /examples/news_multi_example.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Annotated 3 | from dictgest import from_dict, Path, Route 4 | 5 | 6 | news_api1_data = { 7 | "author": "H.O. Ward", 8 | "headline": "Will statically typed python become a thing?", 9 | "details": { 10 | "content": "Over the past 10 years ...[+]", 11 | "description": "Statically typing is getting ...[+]", 12 | "views": 32, 13 | "comments": 2, 14 | }, 15 | "seo": {"tags": ["python", "programming"], "kwrds": ["guido", "python"]}, 16 | } 17 | 18 | news_api2_data = { 19 | "author": "H. 
Gogu", 20 | "news_title": "Best python extensions", 21 | "full_article": "Let's explore the best extensions for python", 22 | "views": 32, 23 | "comments": 2, 24 | } 25 | 26 | 27 | @dataclass 28 | class Article: 29 | author: str 30 | title: str 31 | content: str 32 | 33 | 34 | # Arguments passed to Route can be strings or dictgest.Path 35 | article_api1 = Route(title="headline", content="details/content") 36 | 37 | # New Routing for a new dict structure 38 | article_api2 = Route(title="news_title", content="full_article") 39 | 40 | 41 | article1 = from_dict(Article, news_api1_data, routing=article_api1) 42 | article2 = from_dict(Article, news_api2_data, routing=article_api2) 43 | 44 | print(article1) 45 | print(article2) 46 | -------------------------------------------------------------------------------- /examples/typeconvert_example.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from datetime import datetime 3 | from dictgest.serdes import from_dict, typecast 4 | 5 | 6 | @typecast 7 | @dataclass 8 | class Measurment: 9 | temp: float 10 | humidity: float 11 | date: datetime 12 | 13 | 14 | @typecast 15 | class Sensor: 16 | def __init__( 17 | self, 18 | name: str, 19 | location: str, 20 | uptime: float, 21 | charging: bool, 22 | readings: list[Measurment], 23 | ): 24 | self.name = name 25 | self.location = location 26 | self.uptime = uptime 27 | self.charging = charging 28 | self.readings = readings 29 | 30 | def __repr__(self): 31 | return str(self.__dict__) 32 | 33 | 34 | data = { 35 | "name": "sigma sensor", 36 | "location": "district 9", 37 | "uptime": "39", 38 | "charging": "yes", 39 | "readings": [ 40 | {"temp": "20", "humidity": "0.4", "date": "2022/06/12"}, 41 | {"temp": "25", "humidity": "0.45", "date": "July 03 2021"}, 42 | {"temp": "30", "humidity": "0.39", "date": "2020-05-03"}, 43 | ], 44 | } 45 | 46 | 47 | sensor = from_dict(Sensor, data) 48 | print(sensor) 49 | 
-------------------------------------------------------------------------------- /pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | 3 | # A comma-separated list of package or module names from where C extensions may 4 | # be loaded. Extensions are loading into the active Python interpreter and may 5 | # run arbitrary code. 6 | extension-pkg-allow-list= 7 | 8 | # A comma-separated list of package or module names from where C extensions may 9 | # be loaded. Extensions are loading into the active Python interpreter and may 10 | # run arbitrary code. (This is an alternative name to extension-pkg-allow-list 11 | # for backward compatibility.) 12 | extension-pkg-whitelist= 13 | 14 | # Return non-zero exit code if any of these messages/categories are detected, 15 | # even if score is above --fail-under value. Syntax same as enable. Messages 16 | # specified are enabled, while categories only check already-enabled messages. 17 | fail-on= 18 | 19 | # Specify a score threshold to be exceeded before program exits with error. 20 | fail-under=10.0 21 | 22 | # Files or directories to be skipped. They should be base names, not paths. 23 | ignore=CVS 24 | 25 | # Add files or directories matching the regex patterns to the ignore-list. The 26 | # regex matches against paths and can be in Posix or Windows format. 27 | ignore-paths= 28 | 29 | # Files or directories matching the regex patterns are skipped. The regex 30 | # matches against base names, not paths. The default value ignores emacs file 31 | # locks 32 | ignore-patterns=^\.# 33 | 34 | # Python code to execute, usually for sys.path manipulation such as 35 | # pygtk.require(). 36 | #init-hook= 37 | 38 | # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the 39 | # number of processors available to use. 40 | jobs=1 41 | 42 | # Control the amount of potential inferred values when inferring a single 43 | # object. 
This can help the performance when dealing with large functions or 44 | # complex, nested conditions. 45 | limit-inference-results=100 46 | 47 | # List of plugins (as comma separated values of python module names) to load, 48 | # usually to register additional checkers. 49 | load-plugins= 50 | 51 | # Pickle collected data for later comparisons. 52 | persistent=yes 53 | 54 | # Minimum Python version to use for version dependent checks. Will default to 55 | # the version used to run pylint. 56 | py-version=3.10 57 | 58 | # Discover python modules and packages in the file system subtree. 59 | recursive=no 60 | 61 | # When enabled, pylint would attempt to guess common misconfiguration and emit 62 | # user-friendly hints instead of false-positive error messages. 63 | suggestion-mode=yes 64 | 65 | # Allow loading of arbitrary C extensions. Extensions are imported into the 66 | # active Python interpreter and may run arbitrary code. 67 | unsafe-load-any-extension=no 68 | 69 | 70 | [MESSAGES CONTROL] 71 | 72 | # Only show warnings with the listed confidence levels. Leave empty to show 73 | # all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, 74 | # UNDEFINED. 75 | confidence= 76 | 77 | # Disable the message, report, category or checker with the given id(s). You 78 | # can either give multiple identifiers separated by comma (,) or put this 79 | # option multiple times (only on the command line, not in the configuration 80 | # file where it should appear only once). You can also use "--disable=all" to 81 | # disable everything first and then re-enable specific checks. For example, if 82 | # you want to run only the similarities checker, you can use "--disable=all 83 | # --enable=similarities". If you want to run only the classes checker, but have 84 | # no Warning level messages displayed, use "--disable=all --enable=classes 85 | # --disable=W". 
86 | disable=raw-checker-failed, 87 | bad-inline-option, 88 | locally-disabled, 89 | file-ignored, 90 | suppressed-message, 91 | useless-suppression, 92 | deprecated-pragma, 93 | use-symbolic-message-instead, 94 | unidiomatic-typecheck, 95 | missing-module-docstring, 96 | too-few-public-methods 97 | 98 | # Enable the message, report, category or checker with the given id(s). You can 99 | # either give multiple identifier separated by comma (,) or put this option 100 | # multiple time (only on the command line, not in the configuration file where 101 | # it should appear only once). See also the "--disable" option for examples. 102 | enable=c-extension-no-member 103 | 104 | 105 | [REPORTS] 106 | 107 | # Python expression which should return a score less than or equal to 10. You 108 | # have access to the variables 'fatal', 'error', 'warning', 'refactor', 109 | # 'convention', and 'info' which contain the number of messages in each 110 | # category, as well as 'statement' which is the total number of statements 111 | # analyzed. This score is used by the global evaluation report (RP0004). 112 | evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) 113 | 114 | # Template used to display messages. This is a python new-style format string 115 | # used to format the message information. See doc for all details. 116 | #msg-template= 117 | 118 | # Set the output format. Available formats are text, parseable, colorized, json 119 | # and msvs (visual studio). You can also give a reporter class, e.g. 120 | # mypackage.mymodule.MyReporterClass. 121 | output-format=text 122 | 123 | # Tells whether to display a full report or only the messages. 124 | reports=no 125 | 126 | # Activate the evaluation score. 127 | score=yes 128 | 129 | 130 | [REFACTORING] 131 | 132 | # Maximum number of nested blocks for function / method body 133 | max-nested-blocks=5 134 | 135 | # Complete name of functions that never returns. 
When checking for 136 | # inconsistent-return-statements if a never returning function is called then 137 | # it will be considered as an explicit return statement and no message will be 138 | # printed. 139 | never-returning-functions=sys.exit,argparse.parse_error 140 | 141 | 142 | [BASIC] 143 | 144 | # Naming style matching correct argument names. 145 | argument-naming-style=snake_case 146 | 147 | # Regular expression matching correct argument names. Overrides argument- 148 | # naming-style. If left empty, argument names will be checked with the set 149 | # naming style. 150 | #argument-rgx= 151 | 152 | # Naming style matching correct attribute names. 153 | attr-naming-style=snake_case 154 | 155 | # Regular expression matching correct attribute names. Overrides attr-naming- 156 | # style. If left empty, attribute names will be checked with the set naming 157 | # style. 158 | #attr-rgx= 159 | 160 | # Bad variable names which should always be refused, separated by a comma. 161 | bad-names=foo, 162 | bar, 163 | baz, 164 | toto, 165 | tutu, 166 | tata 167 | 168 | # Bad variable names regexes, separated by a comma. If names match any regex, 169 | # they will always be refused 170 | bad-names-rgxs= 171 | 172 | # Naming style matching correct class attribute names. 173 | class-attribute-naming-style=any 174 | 175 | # Regular expression matching correct class attribute names. Overrides class- 176 | # attribute-naming-style. If left empty, class attribute names will be checked 177 | # with the set naming style. 178 | #class-attribute-rgx= 179 | 180 | # Naming style matching correct class constant names. 181 | class-const-naming-style=UPPER_CASE 182 | 183 | # Regular expression matching correct class constant names. Overrides class- 184 | # const-naming-style. If left empty, class constant names will be checked with 185 | # the set naming style. 186 | #class-const-rgx= 187 | 188 | # Naming style matching correct class names. 
189 | class-naming-style=PascalCase 190 | 191 | # Regular expression matching correct class names. Overrides class-naming- 192 | # style. If left empty, class names will be checked with the set naming style. 193 | #class-rgx= 194 | 195 | # Naming style matching correct constant names. 196 | const-naming-style=UPPER_CASE 197 | 198 | # Regular expression matching correct constant names. Overrides const-naming- 199 | # style. If left empty, constant names will be checked with the set naming 200 | # style. 201 | #const-rgx= 202 | 203 | # Minimum line length for functions/classes that require docstrings, shorter 204 | # ones are exempt. 205 | docstring-min-length=-1 206 | 207 | # Naming style matching correct function names. 208 | function-naming-style=snake_case 209 | 210 | # Regular expression matching correct function names. Overrides function- 211 | # naming-style. If left empty, function names will be checked with the set 212 | # naming style. 213 | #function-rgx= 214 | 215 | # Good variable names which should always be accepted, separated by a comma. 216 | good-names=i, 217 | j, 218 | k, 219 | ex, 220 | Run, 221 | _ 222 | 223 | # Good variable names regexes, separated by a comma. If names match any regex, 224 | # they will always be accepted 225 | good-names-rgxs= 226 | 227 | # Include a hint for the correct naming format with invalid-name. 228 | include-naming-hint=no 229 | 230 | # Naming style matching correct inline iteration names. 231 | inlinevar-naming-style=any 232 | 233 | # Regular expression matching correct inline iteration names. Overrides 234 | # inlinevar-naming-style. If left empty, inline iteration names will be checked 235 | # with the set naming style. 236 | #inlinevar-rgx= 237 | 238 | # Naming style matching correct method names. 239 | method-naming-style=snake_case 240 | 241 | # Regular expression matching correct method names. Overrides method-naming- 242 | # style. If left empty, method names will be checked with the set naming style. 
243 | #method-rgx=
244 | 
245 | # Naming style matching correct module names.
246 | module-naming-style=snake_case
247 | 
248 | # Regular expression matching correct module names. Overrides module-naming-
249 | # style. If left empty, module names will be checked with the set naming style.
250 | #module-rgx=
251 | 
252 | # Colon-delimited sets of names that determine each other's naming style when
253 | # the name regexes allow several styles.
254 | name-group=
255 | 
256 | # Regular expression which should only match function or class names that do
257 | # not require a docstring.
258 | no-docstring-rgx=^_
259 | 
260 | # List of decorators that produce properties, such as abc.abstractproperty. Add
261 | # to this list to register other decorators that produce valid properties.
262 | # These decorators are taken in consideration only for invalid-name.
263 | property-classes=abc.abstractproperty
264 | 
265 | # Regular expression matching correct type variable names. If left empty, type
266 | # variable names will be checked with the set naming style.
267 | #typevar-rgx=
268 | 
269 | # Naming style matching correct variable names.
270 | variable-naming-style=snake_case
271 | 
272 | # Regular expression matching correct variable names. Overrides variable-
273 | # naming-style. If left empty, variable names will be checked with the set
274 | # naming style.
275 | #variable-rgx=
276 | 
277 | 
278 | [FORMAT]
279 | 
280 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
281 | expected-line-ending-format=
282 | 
283 | # Regexp for a line that is allowed to be longer than the limit.
284 | ignore-long-lines=^\s*(# )?<?https?://\S+>?$
285 | 
286 | # Number of spaces of indent required inside a hanging or continued line.
287 | indent-after-paren=4
288 | 
289 | # String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
290 | # tab).
291 | indent-string='    '
292 | 
293 | # Maximum number of characters on a single line.
294 | max-line-length=100 295 | 296 | # Maximum number of lines in a module. 297 | max-module-lines=1000 298 | 299 | # Allow the body of a class to be on the same line as the declaration if body 300 | # contains single statement. 301 | single-line-class-stmt=no 302 | 303 | # Allow the body of an if to be on the same line as the test if there is no 304 | # else. 305 | single-line-if-stmt=no 306 | 307 | 308 | [MISCELLANEOUS] 309 | 310 | # List of note tags to take in consideration, separated by a comma. 311 | notes=FIXME, 312 | XXX, 313 | TODO 314 | 315 | # Regular expression of note tags to take in consideration. 316 | #notes-rgx= 317 | 318 | 319 | [SPELLING] 320 | 321 | # Limits count of emitted suggestions for spelling mistakes. 322 | max-spelling-suggestions=4 323 | 324 | # Spelling dictionary name. Available dictionaries: none. To make it work, 325 | # install the 'python-enchant' package. 326 | spelling-dict= 327 | 328 | # List of comma separated words that should be considered directives if they 329 | # appear and the beginning of a comment and should not be checked. 330 | spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: 331 | 332 | # List of comma separated words that should not be checked. 333 | spelling-ignore-words= 334 | 335 | # A path to a file that contains the private dictionary; one word per line. 336 | spelling-private-dict-file= 337 | 338 | # Tells whether to store unknown words to the private dictionary (see the 339 | # --spelling-private-dict-file option) instead of raising a message. 340 | spelling-store-unknown-words=no 341 | 342 | 343 | [STRING] 344 | 345 | # This flag controls whether inconsistent-quotes generates a warning when the 346 | # character used as a quote delimiter is used inconsistently within a module. 
347 | check-quote-consistency=no 348 | 349 | # This flag controls whether the implicit-str-concat should generate a warning 350 | # on implicit string concatenation in sequences defined over several lines. 351 | check-str-concat-over-line-jumps=no 352 | 353 | 354 | [SIMILARITIES] 355 | 356 | # Comments are removed from the similarity computation 357 | ignore-comments=yes 358 | 359 | # Docstrings are removed from the similarity computation 360 | ignore-docstrings=yes 361 | 362 | # Imports are removed from the similarity computation 363 | ignore-imports=yes 364 | 365 | # Signatures are removed from the similarity computation 366 | ignore-signatures=no 367 | 368 | # Minimum lines number of a similarity. 369 | min-similarity-lines=4 370 | 371 | 372 | [LOGGING] 373 | 374 | # The type of string formatting that logging methods do. `old` means using % 375 | # formatting, `new` is for `{}` formatting. 376 | logging-format-style=old 377 | 378 | # Logging modules to check that the string format arguments are in logging 379 | # function parameter format. 380 | logging-modules=logging 381 | 382 | 383 | [TYPECHECK] 384 | 385 | # List of decorators that produce context managers, such as 386 | # contextlib.contextmanager. Add to this list to register other decorators that 387 | # produce valid context managers. 388 | contextmanager-decorators=contextlib.contextmanager 389 | 390 | # List of members which are set dynamically and missed by pylint inference 391 | # system, and so shouldn't trigger E1101 when accessed. Python regular 392 | # expressions are accepted. 393 | generated-members= 394 | 395 | # Tells whether missing members accessed in mixin class should be ignored. A 396 | # class is considered mixin if its name matches the mixin-class-rgx option. 397 | ignore-mixin-members=yes 398 | 399 | # Tells whether to warn about missing members when the owner of the attribute 400 | # is inferred to be None. 
401 | ignore-none=yes 402 | 403 | # This flag controls whether pylint should warn about no-member and similar 404 | # checks whenever an opaque object is returned when inferring. The inference 405 | # can return multiple potential results while evaluating a Python object, but 406 | # some branches might not be evaluated, which results in partial inference. In 407 | # that case, it might be useful to still emit no-member and other checks for 408 | # the rest of the inferred objects. 409 | ignore-on-opaque-inference=yes 410 | 411 | # List of class names for which member attributes should not be checked (useful 412 | # for classes with dynamically set attributes). This supports the use of 413 | # qualified names. 414 | ignored-classes=optparse.Values,thread._local,_thread._local 415 | 416 | # List of module names for which member attributes should not be checked 417 | # (useful for modules/projects where namespaces are manipulated during runtime 418 | # and thus existing member attributes cannot be deduced by static analysis). It 419 | # supports qualified module names, as well as Unix pattern matching. 420 | ignored-modules= 421 | 422 | # Show a hint with possible names when a member name was not found. The aspect 423 | # of finding the hint is based on edit distance. 424 | missing-member-hint=yes 425 | 426 | # The minimum edit distance a name should have in order to be considered a 427 | # similar match for a missing member name. 428 | missing-member-hint-distance=1 429 | 430 | # The total number of similar names that should be taken in consideration when 431 | # showing a hint for a missing member. 432 | missing-member-max-choices=1 433 | 434 | # Regex pattern to define which classes are considered mixins ignore-mixin- 435 | # members is set to 'yes' 436 | mixin-class-rgx=.*[Mm]ixin 437 | 438 | # List of decorators that change the signature of a decorated function. 
439 | signature-mutators= 440 | 441 | 442 | [VARIABLES] 443 | 444 | # List of additional names supposed to be defined in builtins. Remember that 445 | # you should avoid defining new builtins when possible. 446 | additional-builtins= 447 | 448 | # Tells whether unused global variables should be treated as a violation. 449 | allow-global-unused-variables=yes 450 | 451 | # List of names allowed to shadow builtins 452 | allowed-redefined-builtins= 453 | 454 | # List of strings which can identify a callback function by name. A callback 455 | # name must start or end with one of those strings. 456 | callbacks=cb_, 457 | _cb 458 | 459 | # A regular expression matching the name of dummy variables (i.e. expected to 460 | # not be used). 461 | dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ 462 | 463 | # Argument names that match this expression will be ignored. Default to name 464 | # with leading underscore. 465 | ignored-argument-names=_.*|^ignored_|^unused_ 466 | 467 | # Tells whether we should check for unused import in __init__ files. 468 | init-import=no 469 | 470 | # List of qualified module names which can have objects that can redefine 471 | # builtins. 472 | redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io 473 | 474 | 475 | [CLASSES] 476 | 477 | # Warn about protected attribute access inside special methods 478 | check-protected-access-in-special-methods=no 479 | 480 | # List of method names used to declare (i.e. assign) instance attributes. 481 | defining-attr-methods=__init__, 482 | __new__, 483 | setUp, 484 | __post_init__ 485 | 486 | # List of member names, which should be excluded from the protected access 487 | # warning. 488 | exclude-protected=_asdict, 489 | _fields, 490 | _replace, 491 | _source, 492 | _make 493 | 494 | # List of valid names for the first argument in a class method. 
495 | valid-classmethod-first-arg=cls 496 | 497 | # List of valid names for the first argument in a metaclass class method. 498 | valid-metaclass-classmethod-first-arg=cls 499 | 500 | 501 | [DESIGN] 502 | 503 | # List of regular expressions of class ancestor names to ignore when counting 504 | # public methods (see R0903) 505 | exclude-too-few-public-methods= 506 | 507 | # List of qualified class names to ignore when counting class parents (see 508 | # R0901) 509 | ignored-parents= 510 | 511 | # Maximum number of arguments for function / method. 512 | max-args=5 513 | 514 | # Maximum number of attributes for a class (see R0902). 515 | max-attributes=7 516 | 517 | # Maximum number of boolean expressions in an if statement (see R0916). 518 | max-bool-expr=5 519 | 520 | # Maximum number of branch for function / method body. 521 | max-branches=12 522 | 523 | # Maximum number of locals for function / method body. 524 | max-locals=15 525 | 526 | # Maximum number of parents for a class (see R0901). 527 | max-parents=7 528 | 529 | # Maximum number of public methods for a class (see R0904). 530 | max-public-methods=20 531 | 532 | # Maximum number of return / yield for function / method body. 533 | max-returns=6 534 | 535 | # Maximum number of statements in function / method body. 536 | max-statements=50 537 | 538 | # Minimum number of public methods for a class (see R0903). 539 | min-public-methods=2 540 | 541 | 542 | [IMPORTS] 543 | 544 | # List of modules that can be imported at any level, not just the top level 545 | # one. 546 | allow-any-import-level= 547 | 548 | # Allow wildcard imports from modules that define __all__. 549 | allow-wildcard-with-all=no 550 | 551 | # Analyse import fallback blocks. This can be used to support both Python 2 and 552 | # 3 compatible code, which means that the block might have code that exists 553 | # only in one or another interpreter, leading to false positives when analysed. 
554 | analyse-fallback-blocks=no 555 | 556 | # Deprecated modules which should not be used, separated by a comma. 557 | deprecated-modules= 558 | 559 | # Output a graph (.gv or any supported image format) of external dependencies 560 | # to the given file (report RP0402 must not be disabled). 561 | ext-import-graph= 562 | 563 | # Output a graph (.gv or any supported image format) of all (i.e. internal and 564 | # external) dependencies to the given file (report RP0402 must not be 565 | # disabled). 566 | import-graph= 567 | 568 | # Output a graph (.gv or any supported image format) of internal dependencies 569 | # to the given file (report RP0402 must not be disabled). 570 | int-import-graph= 571 | 572 | # Force import order to recognize a module as part of the standard 573 | # compatibility libraries. 574 | known-standard-library= 575 | 576 | # Force import order to recognize a module as part of a third party library. 577 | known-third-party=enchant 578 | 579 | # Couples of modules and preferred modules, separated by a comma. 580 | preferred-modules= 581 | 582 | 583 | [EXCEPTIONS] 584 | 585 | # Exceptions that will emit a warning when being caught. Defaults to 586 | # "BaseException, Exception". 
587 | overgeneral-exceptions=BaseException, 588 | Exception 589 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | pytest==7.0.1 2 | pytest-cov==3.0.0 3 | python_dateutil>=2.8.2 4 | setuptools>=45.2.0 5 | mypy==0.960 6 | mypy-extensions==0.4.3 7 | pylint==2.13.9 8 | pylint-celery==0.3 9 | pylint-django==2.5.3 10 | pylint-flask==0.6 11 | pylint-plugin-utils==0.7 -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | python_dateutil>=2.8.2 2 | setuptools>=45.2.0 3 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | with open("README.md", "r") as fh: 4 | long_description = fh.read() 5 | 6 | with open("requirements.txt", "r") as f: 7 | requirements = f.read().splitlines() 8 | 9 | setuptools.setup( 10 | name="dictgest", 11 | version="0.3.0", 12 | author="Bogdan Sandoi", 13 | author_email="bogdan.sandoi@gmail.com", 14 | description=("Advanced dictionary ingestion into python objects"), 15 | long_description=long_description, 16 | long_description_content_type="text/markdown", 17 | packages=setuptools.find_packages(exclude=("tests",)), 18 | classifiers=[ 19 | "Programming Language :: Python :: 3", 20 | "License :: OSI Approved :: MIT License", 21 | "Operating System :: OS Independent", 22 | ], 23 | python_requires=">=3.9", 24 | install_requires=requirements, 25 | url="https://github.com/bmsan/DictGest", 26 | ) 27 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/bmsan/DictGest/5e07acce54cc601a35263ab052280bd2f6c0bf0a/tests/__init__.py
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import os
2 | import pytest
3 | 
4 | if os.getenv('_PYTEST_RAISE', "0") != "0":  # opt-in via env var — NOTE(review): presumably set by a debugger launch config (.vscode/launch.json); confirm
5 | 
6 |     @pytest.hookimpl(tryfirst=True)
7 |     def pytest_exception_interact(call):  # re-raise the original exception so a debugger breaks at the failure site, not inside pytest's reporter
8 |         raise call.excinfo.value
9 | 
10 |     @pytest.hookimpl(tryfirst=True)
11 |     def pytest_internalerror(excinfo):  # same treatment for pytest-internal errors
12 |         raise excinfo.value
--------------------------------------------------------------------------------
/tests/test_basic.py:
--------------------------------------------------------------------------------
1 | from typing import Annotated, Any, Protocol, runtime_checkable
2 | import pytest
3 | from dataclasses import dataclass
4 | from dictgest import from_dict, typecast
5 | from dictgest import Path
6 | 
7 | 
8 | def check_fields(obj, ref_dict):  # assert every attribute of obj that also appears in ref_dict holds the expected value; extra ref_dict keys are ignored
9 |     for key, val in obj.__dict__.items():
10 |         if key in ref_dict:
11 |             assert val == ref_dict[key]
12 | 
13 | 
14 | def test_conversion():  # from_dict fills @typecast __init__ params from matching dict keys and drops extra_field
15 |     @typecast
16 |     class A:
17 |         # def __init__(self, a: Any, b: int, c: list[tuple[int, dict]], d: Annotated[list[str], Path('/a/b/c')]=1, e=2, f=3) -> None:
18 |         def __init__(self, a: Any, b: int, c, d: float, e=2, f=3) -> None:
19 |             self.a = a
20 |             self.b = b
21 |             self.c = c
22 |             self.d = d
23 |             self.e = e
24 |             self.f = f
25 | 
26 |     data = {"a": 10, "b": 20, "c": 30, "d": 40, "e": 50, "extra_field": 100}
27 | 
28 |     result = from_dict(A, data)
29 |     for key, val in result.__dict__.items():
30 |         if key in data:
31 |             assert val == data[key]
32 | 
33 | 
34 | def test_conversion_dataclass():  # same ingestion path with a dataclass target; field 'e' keeps its default since the key is absent
35 |     @dataclass
36 |     class A:
37 |         a: int
38 |         b: int
39 |         c: int
40 |         d: int = 4
41 |         e: int = 5
42 | 
43 |     data = {"a": 10, "b": 20, "c": 30, "d": 40, "extra_field": 100}
44 | 
45 |     result = from_dict(A, data)
46 |     check_fields(result, data)
47 | 
48 | 
49 | def test_type_conversion(): 50 | @typecast 51 | class A: 52 | # def __init__(self, a: Any, b: int, c: list[tuple[int, dict]], d: Annotated[list[str], Path('/a/b/c')]=1, e=2, f=3) -> None: 53 | def __init__(self, a: Any, b: int, c: str, d: float, e=2, f=3) -> None: 54 | self.a = a 55 | self.b = b 56 | self.c = c 57 | self.d = d 58 | self.e = e 59 | self.f = f 60 | 61 | data = {"a": 10, "b": 20.5, "c": 30, "d": 40, "e": 50, "extra_field": 100} 62 | ref_data = {"a": 10, "b": 20, "c": "30", "d": 40, "e": 50, "f": 3} 63 | 64 | result = from_dict(A, data, convert_types=None) 65 | check_fields(result, data) 66 | 67 | result = from_dict(A, data) 68 | check_fields(result, ref_data) 69 | 70 | 71 | def test_type_conversion_annotated(): 72 | @typecast 73 | class A: 74 | # def __init__(self, a: Any, b: int, c: list[tuple[int, dict]], d: Annotated[list[str], Path('/a/b/c')]=1, e=2, f=3) -> None: 75 | def __init__( 76 | self, 77 | a: Any, 78 | b: int, 79 | c: Annotated[str, Path("ccc")], 80 | d: Annotated[float, Path("x/y/d")], 81 | e=2, 82 | f=3, 83 | ) -> None: 84 | self.a = a 85 | self.b = b 86 | self.c = c 87 | self.d = d 88 | self.e = e 89 | self.f = f 90 | 91 | data = { 92 | "a": 10, 93 | "b": 20.5, 94 | "ccc": 30, 95 | "x": {"y": {"d": 40}}, 96 | "e": 50, 97 | "extra_field": 100, 98 | } 99 | ref_data = {"a": 10, "b": 20, "c": "30", "d": 40, "e": 50, "f": 3} 100 | 101 | result = from_dict(A, data) 102 | check_fields(result, ref_data) 103 | 104 | 105 | def test_type_conversion_advanced(): 106 | @typecast 107 | class C: 108 | # def __init__(self, a: Any, b: int, c: list[tuple[int, dict]], d: Annotated[list[str], Path('/a/b/c')]=1, e=2, f=3) -> None: 109 | def __init__(self, x: Any, y: int, z: str) -> None: 110 | self.x = x 111 | self.y = y 112 | self.z = z 113 | 114 | @typecast 115 | @dataclass 116 | class B: 117 | a: list[tuple[float, str]] 118 | b: Any 119 | c: dict[float, list[C]] 120 | d: int = 4 121 | e: int = 5 122 | 123 | @typecast 124 | class A: 125 | def 
__init__(self, a: Any, b: int, c: B, d: float, e=2, f=3) -> None: 126 | self.a = a 127 | self.b = b 128 | self.c = c 129 | self.d = d 130 | self.e = e 131 | self.f = f 132 | 133 | data = { 134 | "a": 10, 135 | "b": 20.5, 136 | "c": { 137 | "a": (["3.14", "el0"], [20, "el2"], ["31", "el2"]), 138 | "b": "second", 139 | "c": { 140 | 10: [{"x": 1, "y": 2, "z": 3}, {"x": 10, "y": "20", "z": 30}], 141 | "20": ({"x": 11, "y": "12", "z": 13}, {"x": 110, "y": 120, "z": 130}), 142 | }, 143 | }, 144 | "d": 40, 145 | "e": 50, 146 | "extra_field": 100, 147 | } 148 | 149 | result = from_dict(A, data) 150 | print(result) 151 | 152 | 153 | def test_type_conversion_advanced_annotated(): 154 | @typecast 155 | class C: 156 | def __init__(self, x: Any, y: int, z: str) -> None: 157 | self.x = x 158 | self.y = y 159 | self.z = z 160 | 161 | @typecast 162 | @dataclass 163 | class B: 164 | a: list[tuple[float, str]] 165 | b: Any 166 | c: Annotated[dict[float, list[C]], Path("c1/c2/c3")] 167 | d: int = 4 168 | e: int = 5 169 | 170 | @typecast 171 | class A: 172 | def __init__( 173 | self, a: Any, b: int, c: Annotated[B, Path("ccc")], d: float, e=2, f=3 174 | ) -> None: 175 | self.a = a 176 | self.b = b 177 | self.c = c 178 | self.d = d 179 | self.e = e 180 | self.f = f 181 | 182 | data = { 183 | "a": 10, 184 | "b": 20.5, 185 | "ccc": { 186 | "a": (["3.14", "el0"], [20, "el2"], ["31", "el2"]), 187 | "b": "second", 188 | "c1": { 189 | "c2": { 190 | "c3": { 191 | 10: [{"x": 1, "y": 2, "z": 3}, {"x": 10, "y": "20", "z": 30}], 192 | "20": ( 193 | {"x": 11, "y": "12", "z": 13}, 194 | {"x": 110, "y": 120, "z": 130}, 195 | ), 196 | } 197 | } 198 | }, 199 | }, 200 | "d": 40, 201 | "e": 50, 202 | "extra_field": 100, 203 | } 204 | 205 | result = from_dict(A, data) 206 | print(result) 207 | -------------------------------------------------------------------------------- /tests/test_convert.py: -------------------------------------------------------------------------------- 1 | from dataclasses import 
dataclass 2 | from typing import Annotated 3 | import pytest 4 | from dictgest.cast import convert 5 | from dictgest import Path, typecast, from_dict, default_convertor 6 | from .utils import check_fields 7 | from datetime import datetime 8 | 9 | 10 | def test_float(): 11 | r = convert(3.4, float) 12 | assert r == 3.4 13 | assert isinstance(r, float) 14 | 15 | r = convert("3.4", float) 16 | 17 | assert r == 3.4 18 | assert isinstance(r, float) 19 | 20 | 21 | def test_int(): 22 | r = convert(3.4, int) 23 | assert r == 3 24 | assert isinstance(r, int) 25 | 26 | 27 | def test_iterators(): 28 | r = convert([3.4, "7", "8"], list[int]) 29 | assert r == [3, 7, 8] 30 | 31 | r = convert([3.4, "7", "8"], tuple[float, str, int]) 32 | assert r == (3.4, "7", 8) 33 | 34 | r = convert([3.4, "7", "8"], set[str]) 35 | assert r == set(["3.4", "7", "8"]) 36 | 37 | 38 | def test_nested_iterators(): 39 | r = convert( 40 | [ 41 | (1, 2, 3, "4"), 42 | ("3.4", "5", 7.6, 9.2), 43 | (("a", "bc", "de"), ["a", "a", "b", "a"]), 44 | ], 45 | tuple[list[int], set[float], list[set[str]]], 46 | ) 47 | 48 | 49 | def test_dict(): 50 | r = convert({"aa": "3.4", 4: 6.5, "bb": "1"}, dict[str, float]) 51 | assert r == {"aa": 3.4, "4": 6.5, "bb": 1} 52 | 53 | r = convert({"aa": ["3.4"], 4: [6.5], "bb": ["1"]}, dict[str, tuple]) 54 | assert r == {"aa": ("3.4",), "4": (6.5,), "bb": ("1",)} 55 | 56 | r = convert({"aa": ["3.4"], 4: [6.5], "bb": ["1"]}, dict[str, tuple[float]]) 57 | assert r == {"aa": (3.4,), "4": (6.5,), "bb": (1,)} 58 | 59 | r = convert({"aa": ["3.4"], 4: [6.5], "bb": ["1"]}, dict) 60 | print(r, type(r)) 61 | 62 | r = convert({"aa": ["3.4"], 4: [6.5], "bb": ["1"]}, dict) 63 | assert r == {"aa": ["3.4"], 4: [6.5], "bb": ["1"]} 64 | 65 | 66 | def test_mappings(): 67 | data = { 68 | "a": 3.4, 69 | "b": 4, 70 | "c": { 71 | "d": 10.1, 72 | "de": {"e": 10.2, "f": [{"g": 10.3}, {"g": 11}, {"g": 12.1}, {"g": 13.2}]}, 73 | }, 74 | } 75 | 76 | @dataclass 77 | class A: 78 | a: int 79 | b: float 80 | 
d: Annotated[int, Path("c/d")] 81 | e: Annotated[int, Path("c/de/e")] 82 | f: Annotated[list[int], Path("c/de/f/g")] 83 | 84 | a = from_dict(A, data, convert_types=False) 85 | check_fields( 86 | a, {"a": 3.4, "b": 4, "d": 10.1, "e": 10.2, "f": [10.3, 11, 12.1, 13.2]} 87 | ) 88 | 89 | a = from_dict(A, data) 90 | check_fields(a, {"a": 3, "b": 4.0, "d": 10, "e": 10, "f": [10, 11, 12, 13]}) 91 | 92 | a = from_dict(A, data, None) 93 | check_fields(a, {"a": 3, "b": 4.0, "d": 10, "e": 10, "f": [10, 11, 12, 13]}) 94 | 95 | type_mapping = {float: lambda x: f"f{x}", int: lambda x: f"i{x}"} 96 | a = from_dict(A, data, type_mapping) 97 | 98 | # we only convert elements of different types 99 | check_fields( 100 | a, 101 | { 102 | "a": "i3.4", 103 | "b": "f4", 104 | "d": "i10.1", 105 | "e": "i10.2", 106 | "f": ["i10.3", 11, "i12.1", "i13.2"], 107 | }, 108 | ) 109 | 110 | 111 | def test_default_converter(): 112 | data = { 113 | "x": "lol", 114 | "a": "yes", 115 | "a1": 1, 116 | "a2": True, 117 | "b": "false", 118 | "c": 0, 119 | "d": "2020-01-01", 120 | "e": 1640988000, 121 | "f": datetime(2022, 1, 1, 0, 0), 122 | "g": "test_marker", 123 | } 124 | 125 | bool_convertor = default_convertor.get_converter(bool) 126 | 127 | def custom_bool_convertor(val): 128 | if val == "lol": 129 | return True 130 | return bool_convertor(val) 131 | 132 | global hit_count 133 | hit_count = 0 134 | 135 | def custom_generic_convertor(val): 136 | global hit_count 137 | hit_count += 1 138 | assert hit_count == 1 139 | assert val == "test_marker" 140 | return [["abc", "def"]] 141 | 142 | default_convertor.register(bool, custom_bool_convertor) 143 | default_convertor.register(list[list[str]], custom_generic_convertor) 144 | 145 | @dataclass 146 | class AA: 147 | x: bool 148 | a: bool 149 | a1: bool 150 | a2: bool 151 | b: bool 152 | c: bool 153 | d: datetime 154 | e: datetime 155 | f: datetime 156 | g: list[list[str]] 157 | 158 | a = from_dict(AA, data) 159 | check_fields( 160 | a, 161 | { 162 | "x": True, 
163 | "a": True, 164 | "a1": True, 165 | "a2": True, 166 | "b": False, 167 | "c": False, 168 | "d": datetime(2020, 1, 1, 0, 0), 169 | "e": datetime(2021, 12, 31, 22, 0), 170 | "f": datetime(2022, 1, 1, 0, 0), 171 | }, 172 | ) 173 | assert hit_count == 1 174 | 175 | 176 | def test_negative(): 177 | with pytest.raises(Exception): 178 | convert(list[{"a": 1, "b": 2}, [1, 2]], list[dict]) 179 | 180 | with pytest.raises(Exception): 181 | convert([3.14, "str"], dict[str, list]) 182 | 183 | 184 | def test_flatten(): 185 | data = { 186 | "a": 3.4, 187 | "b": 4, 188 | "c": { 189 | "de": { 190 | "e": 10.2, 191 | "f": [ 192 | {"g": [10.3, 100]}, 193 | {"g": [11, 200]}, 194 | {"g": [12.1, 300]}, 195 | {"g": [13.2, 400]}, 196 | ], 197 | }, 198 | }, 199 | } 200 | 201 | @dataclass 202 | class A: 203 | a: int 204 | b: float 205 | f: Annotated[list[int], Path("c/de/f/g")] 206 | g: Annotated[list[int], Path("c/de/f/g", flatten_en=False)] 207 | 208 | a = from_dict(A, data, convert_types=False) 209 | check_fields( 210 | a, 211 | { 212 | "a": 3.4, 213 | "b": 4, 214 | "d": 10.1, 215 | "e": 10.2, 216 | "f": [10.3, 100, 11, 200, 12.1, 300, 13.2, 400], 217 | "g": [[10.3, 100], [11, 200], [12.1, 300], [13.2, 400]], 218 | }, 219 | ) 220 | -------------------------------------------------------------------------------- /tests/test_neg.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated, Any, Protocol, runtime_checkable 2 | import pytest 3 | from dataclasses import dataclass 4 | from dictgest import from_dict, typecast 5 | from dictgest import Path 6 | from dictgest.routes import Chart, Route 7 | 8 | 9 | def check_fields(obj, ref_dict): 10 | for key, val in obj.__dict__.items(): 11 | if key in ref_dict: 12 | assert val == ref_dict[key] 13 | 14 | 15 | def test_missing_dict_field(): 16 | @dataclass 17 | class A1: 18 | a: int 19 | b: float 20 | c: str 21 | d: int 22 | 23 | data = {"a": 10, "b": 20, "d": 40} 24 | 25 | with 
pytest.raises(Exception): 26 | result = from_dict(A1, data) 27 | 28 | 29 | def test_type_error(): 30 | @dataclass 31 | class A1: 32 | a: int 33 | b: float 34 | d: int 35 | 36 | data = {"a": 10, "b": {"f": 20}, "d": 40} 37 | 38 | with pytest.raises(TypeError): 39 | from_dict(A1, data) 40 | 41 | @dataclass 42 | class A2: 43 | a: bool 44 | 45 | data = {"a": "yesok"} 46 | 47 | with pytest.raises(ValueError): 48 | from_dict(A2, data) 49 | 50 | 51 | def test_route_error(): 52 | @dataclass 53 | class A1: 54 | a: int 55 | b: float 56 | d: int 57 | 58 | data = {"a": 10, "b": 20, "de": 40} 59 | 60 | with pytest.raises(ValueError): 61 | from_dict(A1, data, routing=Route(f="d")) 62 | 63 | 64 | def test_multiple_path_error(): 65 | @dataclass 66 | class A1: 67 | a: int 68 | b: float 69 | d: Annotated[int, Path("de")] 70 | 71 | data = {"a": 10, "b": 20, "de": 40} 72 | 73 | with pytest.raises(ValueError): 74 | from_dict(A1, data, routing=Route(d="d")) 75 | 76 | 77 | def test_init(): 78 | with pytest.raises(TypeError): 79 | Chart() 80 | 81 | with pytest.raises(TypeError): 82 | Chart(Route(x="a")) 83 | 84 | with pytest.raises(TypeError): 85 | Route(x={}) 86 | 87 | with pytest.raises(TypeError): 88 | Route(x=None) 89 | 90 | with pytest.raises(ValueError): 91 | Route() 92 | 93 | @dataclass 94 | class A1: 95 | a: int 96 | 97 | with pytest.raises(ValueError): 98 | Chart({A1: Route(b="")}) 99 | 100 | 101 | def test_path_wildcard(): 102 | @dataclass 103 | class A1: 104 | a: Annotated[list[str], Path("*{a==3}")] 105 | 106 | with pytest.raises(TypeError): 107 | from_dict(A1, {"a": 3.14}) 108 | -------------------------------------------------------------------------------- /tests/test_path.py: -------------------------------------------------------------------------------- 1 | from dictgest import Path 2 | 3 | 4 | def test_basic(): 5 | p = Path("") 6 | data = { 7 | "a": 1, 8 | "b": 2, 9 | "c": {"d": 4}, 10 | "e": [{"g": 120}, {"f": 30, "g": 100}, {"f": 20, "g": 14}, {"f": 30, "g": 200}], 11 
| } 12 | res = p.extract(data) 13 | assert res == data 14 | 15 | p = Path("a") 16 | res = p.extract(data) 17 | assert res == 1 18 | 19 | p = Path("c") 20 | res = p.extract(data) 21 | assert res == {"d": 4} 22 | 23 | p = Path("c/d") 24 | res = p.extract(data) 25 | assert res == 4 26 | 27 | p = Path("e/*{f=30}/g") 28 | res = p.extract(data) 29 | print(res) 30 | assert res == [100, 200] 31 | 32 | p = Path("e/f") 33 | res = p.extract(data) 34 | print(res) 35 | assert res == [30, 20, 30] 36 | -------------------------------------------------------------------------------- /tests/test_route_template.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Annotated 3 | import pytest 4 | from dictgest.cast import convert 5 | from dictgest import Path, typecast, from_dict, default_convertor 6 | from dictgest.routes import Chart 7 | from dictgest.serdes import Route 8 | from .utils import check_fields 9 | from datetime import datetime 10 | 11 | 12 | def test_route_template(): 13 | data = { 14 | "a": 3.4, 15 | "b": 4, 16 | "c": { 17 | "d": 10.1, 18 | "de": {"e": 10.2, "f": [{"g": 10.3}, {"g": 11}, {"g": 12.1}, {"g": 13.2}]}, 19 | }, 20 | } 21 | 22 | @dataclass 23 | class A1: 24 | a: int 25 | b: float 26 | d: Annotated[int, "some other annotation"] 27 | e: int 28 | f: list[int] 29 | 30 | A1_route = Route(d="c/d", e=Path("c/de/e"), f="c/de/f/g") 31 | 32 | a = from_dict(A1, data, convert_types=False, routing=A1_route) 33 | check_fields( 34 | a, {"a": 3.4, "b": 4, "d": 10.1, "e": 10.2, "f": [10.3, 11, 12.1, 13.2]} 35 | ) 36 | 37 | a = from_dict(A1, data, routing=A1_route) 38 | check_fields(a, {"a": 3, "b": 4.0, "d": 10, "e": 10, "f": [10, 11, 12, 13]}) 39 | 40 | 41 | def test_chart_template(): 42 | data = { 43 | "a": 3.4, 44 | "b": 4, 45 | "c": { 46 | "d": 10.1, 47 | "de": {"e": 10.2, "f": [{"g": 10.3}, {"g": 11}, {"g": 12.1}, {"g": 13.2}]}, 48 | }, 49 | } 50 | 51 | @dataclass 52 | class B1: 
53 | x: float 54 | y: list[float] 55 | 56 | @dataclass 57 | class A1: 58 | a: int 59 | b: float 60 | d: Annotated[int, "some other annotation"] 61 | e: B1 62 | f: list[int] 63 | 64 | @dataclass 65 | class C1: 66 | a: int 67 | b: float 68 | d: Annotated[int, "some other annotation"] 69 | f: list[int] 70 | 71 | routes = { 72 | A1: Route(d="c/d", e=Path("c/de"), f="c/de/f/g"), 73 | B1: Route(x="e", y=Path("f/g")), 74 | } 75 | 76 | # a = from_dict(A1, data, convert_types=False, routing=routes) 77 | # check_fields( 78 | # a, {"a": 3.4, "b": 4, "d": 10.1, "e": 10.2, "f": [10.3, 11, 12.1, 13.2]} 79 | # ) 80 | 81 | a = from_dict(A1, data, routing=routes) 82 | check_fields(a, {"a": 3, "b": 4.0, "d": 10, "f": [10, 11, 12, 13]}) 83 | check_fields(a.e, {"x": 10.2, "y": [10.3, 11, 12.1, 13.2]}) 84 | 85 | a = from_dict(A1, data, routing=Chart(routes)) 86 | check_fields(a, {"a": 3, "b": 4.0, "d": 10, "f": [10, 11, 12, 13]}) 87 | check_fields(a.e, {"x": 10.2, "y": [10.3, 11, 12.1, 13.2]}) 88 | 89 | route = Route(d="c/d", f="c/de/f/g") 90 | 91 | c = from_dict(C1, data, routing=route) 92 | check_fields(c, {"a": 3, "b": 4.0, "d": 10, "f": [10, 11, 12, 13]}) 93 | 94 | c = from_dict(C1, data, routing={C1: route}) 95 | check_fields(c, {"a": 3, "b": 4.0, "d": 10, "f": [10, 11, 12, 13]}) 96 | 97 | c = from_dict(C1, data, routing=Chart({C1: route})) 98 | check_fields(c, {"a": 3, "b": 4.0, "d": 10, "f": [10, 11, 12, 13]}) 99 | 100 | c = from_dict(C1, data, convert_types=False, routing=route) 101 | check_fields(c, {"a": 3.4, "b": 4.0, "d": 10.1, "f": [10.3, 11, 12.1, 13.2]}) 102 | -------------------------------------------------------------------------------- /tests/test_table.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from typing import Annotated, Any, Protocol, runtime_checkable 3 | import pytest 4 | from dataclasses import dataclass 5 | from dictgest import from_dict, typecast 6 | from dictgest import Path 7 | import 
dictgest as dg 8 | from .utils import check_fields 9 | 10 | 11 | def test_single_item(): 12 | @typecast 13 | class SenzorData: 14 | def __init__( 15 | self, 16 | timestamps: list[datetime.datetime], 17 | temperatures: list[float], 18 | humidity: list[float], 19 | ) -> None: 20 | self.timestamps = timestamps 21 | self.temperatures = temperatures 22 | self.humidity = humidity 23 | 24 | header = ["humidity", "temperatures", "timestamps"] 25 | table_data = [ 26 | [0.4, 7.4, "1Dec2022"], 27 | [0.6, 5.4, "2Dec2022"], 28 | ] 29 | 30 | ref = { 31 | "humidity": [0.4, 0.6], 32 | "temperatures": [7.4, 5.4], 33 | "timestamps": [ 34 | datetime.datetime(year=2022, month=12, day=1), 35 | datetime.datetime(year=2022, month=12, day=2), 36 | ], 37 | } 38 | 39 | result = dg.table_to_item(SenzorData, table_data, header) 40 | check_fields(result, ref) 41 | 42 | with pytest.raises(ValueError): 43 | result = dg.table_to_item(SenzorData, table_data, header, transpose=True) 44 | list(result) 45 | 46 | table_data = [ 47 | [7.4, 5.4], 48 | ["1Dec2022", "2Dec2022"], 49 | [0.4, 0.6], 50 | ] 51 | 52 | header = ["temperatures", "timestamps", "humidity"] 53 | result = dg.table_to_item(SenzorData, table_data, header, transpose=True) 54 | check_fields(result, ref) 55 | 56 | with pytest.raises(ValueError): 57 | result = dg.table_to_item(SenzorData, table_data, header) 58 | list(result) 59 | 60 | 61 | def test_multi_item(): 62 | @typecast 63 | class SenzorData: 64 | def __init__( 65 | self, 66 | timestamp: datetime.datetime, 67 | temperature: float, 68 | humidity: float, 69 | ) -> None: 70 | self.timestamp = timestamp 71 | self.temperature = temperature 72 | self.humidity = humidity 73 | 74 | refs = [ 75 | { 76 | "humidity": 0.4, 77 | "temperature": 7.4, 78 | "timestamp": datetime.datetime(year=2022, month=12, day=1), 79 | }, 80 | { 81 | "humidity": 0.6, 82 | "temperature": 5.4, 83 | "timestamps": datetime.datetime(year=2022, month=12, day=2), 84 | }, 85 | ] 86 | 87 | header = ["humidity", 
"temperature", "timestamp"] 88 | table_data = [ 89 | [0.4, 7.4, "1Dec2022"], 90 | [0.6, 5.4, "2Dec2022"], 91 | ] 92 | 93 | result = dg.table_to_items(SenzorData, table_data, header) 94 | for res, ref in zip(result, refs): 95 | check_fields(res, ref) 96 | 97 | table_data = [ 98 | [7.4, 5.4], 99 | ["1Dec2022", "2Dec2022"], 100 | [0.4, 0.6], 101 | ] 102 | 103 | header = ["temperature", "timestamp", "humidity"] 104 | result = dg.table_to_items(SenzorData, table_data, header, transpose=True) 105 | for res, ref in zip(result, refs): 106 | check_fields(res, ref) 107 | 108 | with pytest.raises(ValueError): 109 | result = dg.table_to_items(SenzorData, table_data, header) 110 | list(result) 111 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | def check_fields(obj, ref_dict): 2 | for key, val in obj.__dict__.items(): 3 | if key in ref_dict: 4 | assert val == ref_dict[key] 5 | --------------------------------------------------------------------------------