├── .codacy.yml ├── .coveragerc ├── .dockerignore ├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ ├── ci.yml │ └── lint-pr.yml ├── .gitignore ├── .releaserc ├── LICENSE ├── README.md ├── poetry.lock ├── pyproject.toml ├── tests ├── test_issue_8.py ├── test_mapping_configurations.py ├── test_optionals.py ├── test_typed_json_dataclass.py └── test_utils.py └── typed_json_dataclass ├── __init__.py ├── typed_json_dataclass.py └── utils.py /.codacy.yml: -------------------------------------------------------------------------------- 1 | exclude_paths: 2 | - tests/** 3 | - .github/** 4 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source=typed_json_dataclass 3 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | __pycache__ 3 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] 4 | patreon: abatilo 5 | open_collective: # Replace with a single Open Collective username 6 | ko_fi: abatilo 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | liberapay: # Replace with a single Liberapay username 10 | issuehunt: abatilo 11 | otechie: # Replace with a single Otechie username 12 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] 13 | 
-------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | ### Subject of the issue 11 | Describe your issue here. 12 | 13 | ### Steps to reproduce 14 | Tell us how to reproduce this issue. A forked branch is ideal, but instructions 15 | are helpful as well. 16 | 17 | ### Expected behaviour 18 | Tell us what you expect to have happened 19 | 20 | ### Actual behaviour 21 | Tell us what actually happens 22 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | ### Is your feature request related to a problem? Please describe. 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | ### Describe the solution you'd like 14 | A clear and concise description of what you want to happen. 15 | 16 | ### Describe alternatives you've considered 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | ### Additional context 20 | Add any other context or screenshots about the feature request here. 
21 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: 3 | push: 4 | branches: 5 | - master 6 | pull_request: 7 | 8 | jobs: 9 | ci: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v2 13 | - uses: actions/setup-python@v2 14 | with: 15 | python-version: 3.7 16 | - name: Install poetry 17 | uses: abatilo/actions-poetry@v2.0.0 18 | - name: Install dependencies 19 | run: | 20 | poetry install 21 | - name: Run flake8 22 | run: | 23 | poetry run python -m flake8 --show-source --import-order-style pep8 typed_json_dataclass 24 | - name: Run pytest 25 | run: | 26 | poetry run python -m pytest --cov-report xml:codecov.xml --cov=typed_json_dataclass --cov-report=html --junit-xml=coverage.xml --cov-branch --cov-fail-under=100 tests/ 27 | - if: github.event_name == 'push' 28 | name: Publish 29 | env: 30 | PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }} 31 | PYPI_USERNAME: ${{ secrets.PYPI_USERNAME }} 32 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 33 | run: | 34 | poetry run codecov -t $CODECOV_TOKEN 35 | poetry publish --build --no-interaction -vv --username $PYPI_USERNAME --password $PYPI_PASSWORD 36 | - if: github.event_name == 'push' 37 | name: Generate release 38 | env: 39 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 40 | run: npx semantic-release 41 | -------------------------------------------------------------------------------- /.github/workflows/lint-pr.yml: -------------------------------------------------------------------------------- 1 | name: "Lint PR" 2 | on: 3 | pull_request: 4 | types: 5 | - opened 6 | - edited 7 | - synchronize 8 | 9 | jobs: 10 | main: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: amannn/action-semantic-pull-request@v1.2.0 14 | env: 15 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 16 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | __pycache__ 3 | *.egg-info 4 | *.pyc 5 | codecov.xml 6 | .coverage 7 | coverage.xml 8 | htmlcov/ 9 | -------------------------------------------------------------------------------- /.releaserc: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": [ 3 | "@semantic-release/commit-analyzer", 4 | "@semantic-release/release-notes-generator", 5 | "@semantic-release/github" 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Channel Cat 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # typed_json_dataclass 2 | [![Codacy Badge](https://api.codacy.com/project/badge/Grade/4344420de20b4262a4912d81cb28d175)](https://www.codacy.com/app/abatilo/typed-json-dataclass?utm_source=github.com&utm_medium=referral&utm_content=abatilo/typed-json-dataclass&utm_campaign=Badge_Grade) 3 | [![codecov](https://codecov.io/gh/abatilo/typed-json-dataclass/branch/master/graph/badge.svg)](https://codecov.io/gh/abatilo/typed-json-dataclass) 4 | [![PyPI status](https://img.shields.io/pypi/status/typed_json_dataclass.svg)](https://pypi.python.org/pypi/typed_json_dataclass/) 5 | [![PyPI version](https://badge.fury.io/py/typed-json-dataclass.svg)](https://badge.fury.io/py/typed-json-dataclass) 6 | [![PyPI pyversions](https://img.shields.io/pypi/pyversions/typed-json-dataclass.svg)](https://pypi.python.org/pypi/typed-json-dataclass/) 7 | ![PyPI - Downloads](https://img.shields.io/pypi/dm/typed-json-dataclass.svg) 8 | [![MIT license](http://img.shields.io/badge/license-MIT-brightgreen.svg)](http://opensource.org/licenses/MIT) 9 | 10 | # This project is open to public contributions but is not actively maintained by the original author. 11 | 12 | `typed_json_dataclass` is a library that augments the Python3.7 13 | [dataclass](https://docs.python.org/3/library/dataclasses.html) feature in two 14 | major ways: 15 | 1. Add a way to recursively grab class dictionary definitions, thus making your 16 | dataclass JSON serializable 17 | 2. Add a light amount of type validation to your dataclasses, so that you can 18 | validate that the JSON you're being given matches the data types that you're 19 | expecting. 
20 | 21 | By expressing your data as dataclasses, and by having your incoming data 22 | validated as it is received, you can easily implement the [Data Transfer Object 23 | (DTO)](https://martinfowler.com/eaaCatalog/dataTransferObject.html) pattern in 24 | your Python code. 25 | 26 | This library can be thought of as a combination of 27 | [attrs](https://github.com/python-attrs/attrs), 28 | [cattrs](https://github.com/Tinche/cattrs), and 29 | [marshmallow](https://github.com/marshmallow-code/marshmallow) 30 | 31 | ## Getting Started 32 | 33 | Install the library from PyPI: 34 | ``` 35 | pip install typed_json_dataclass 36 | ``` 37 | 38 | Use the dataclass decorator just like normal, but add the `TypedJsonMixin` from 39 | this library, to your class definition. This will add 4 new methods to all of your dataclasses: 40 | 1. from_dict() 41 | ```python 42 | @classmethod 43 | def from_dict(cls, raw_dict, *, mapping_mode=MappingMode.NoMap): 44 | """Given a python dict, create an instance of the implementing class. 45 | 46 | :raw_dict: A dictionary that represents the DTO to create 47 | :mapping_mode: Format for properties 48 | :returns: Returns an instance of the DTO, instantiated via the dict 49 | """ 50 | ``` 51 | 2. from_json() 52 | ```python 53 | @classmethod 54 | def from_json(cls, raw_json, *, mapping_mode=MappingMode.NoMap): 55 | """Given a raw json string, create an instance of the implementing class. 56 | 57 | :raw_json: A json string that represents the DTO to create 58 | :mapping_mode: Format for properties 59 | :returns: Returns an instance of the DTO, instantiated via the json 60 | """ 61 | ``` 62 | 3. to_dict() 63 | ```python 64 | def to_dict(self, *, keep_none=False, mapping_mode=MappingMode.NoMap, warn_on_initvar=True): 65 | """Express the DTO as a dictionary. 66 | 67 | :keep_none: Filter keys that are None 68 | :mapping_mode: Format for properties 69 | :warn_on_initvar: Emit a warning if the instance contains non-default 70 | init-only variables. 
71 | :returns: Returns the instantiated DTO as a dictionary 72 | """ 73 | ``` 74 | 4. to_json() 75 | ```python 76 | def to_json(self, *, keep_none=False, mapping_mode=MappingMode.NoMap, warn_on_initvar=True): 77 | """Express the DTO as a json string. 78 | 79 | :keep_none: Filter keys that are None 80 | :mapping_mode: Format for properties 81 | :warn_on_initvar: Emit a warning if the instance contains non-default 82 | init-only variables. 83 | :returns: Returns the instantiated DTO as a json string 84 | """ 85 | ``` 86 | 87 | ## Examples 88 | 89 | ### Converting your dataclass to a JSON serializable format 90 | ```python 91 | from typing import List 92 | from dataclasses import dataclass 93 | from typed_json_dataclass import TypedJsonMixin 94 | 95 | @dataclass 96 | class Person(TypedJsonMixin): 97 | name: str 98 | age: int 99 | 100 | @dataclass 101 | class Family(TypedJsonMixin): 102 | people: List[Person] 103 | 104 | bob = Person(name='Bob', age=24) 105 | alice = Person(name='Alice', age=32) 106 | family = Family(people=[bob, alice]) 107 | 108 | print(family.to_json()) 109 | # => {"people": [{"name": "Bob", "age": 24}, {"name": "Alice", "age": 32}]} 110 | ``` 111 | 112 | 113 | If your data doesn't match the type definitions, you'll get a helpful error: 114 | ```python 115 | from dataclasses import dataclass 116 | from typed_json_dataclass import TypedJsonMixin 117 | 118 | @dataclass 119 | class Person(TypedJsonMixin): 120 | name: str 121 | age: int 122 | 123 | request_data = '{"name":"Bob","age":"24"}' 124 | 125 | bob = Person.from_json(request_data) 126 | # => TypeError: Person.age is expected to be <class 'int'>, but value 24 with type <class 'str'> was found instead 127 | ``` 128 | 129 | And you can parse data from a Python `dict` as well.
Just use the `.from_dict()` function instead: 130 | ```python 131 | from dataclasses import dataclass 132 | from typed_json_dataclass import TypedJsonMixin 133 | 134 | @dataclass 135 | class Person(TypedJsonMixin): 136 | name: str 137 | age: int 138 | 139 | request_data_as_dict = { 140 | 'name': 'Alice', 141 | 'age': '32' 142 | } 143 | 144 | alice = Person.from_dict(request_data_as_dict) 145 | # => TypeError: Person.age is expected to be <class 'int'>, but value 32 with type <class 'str'> was found instead 146 | ``` 147 | 148 | ### Setting a mapping_mode for auto mapping 149 | ```python 150 | from dataclasses import dataclass 151 | from typed_json_dataclass import TypedJsonMixin, MappingMode 152 | 153 | @dataclass 154 | class Person(TypedJsonMixin): 155 | person_name: str 156 | person_age: int 157 | 158 | request_data_as_dict = { 159 | 'personName': 'Alice', 160 | 'personAge': 32 161 | } 162 | 163 | alice = Person.from_dict(request_data_as_dict, mapping_mode=MappingMode.SnakeCase) 164 | # => Person(person_name='Alice', person_age=32) 165 | ``` 166 | 167 | This mapping mode is useful for when you get requests that have the JSON in a 168 | camel case format, but you want your objects to be snake case and stay PEP8 169 | compliant. 170 | 171 | ## Limitations and Caveats 172 | 173 | ### Dataclasses with init-only variables 174 | 175 | Support for dataclasses with [init-only variables](https://docs.python.org/3/library/dataclasses.html#init-only-variables) 176 | is limited. Although `to_dict` and `to_json` will convert the dataclass, the 177 | resulting dict or JSON string will not contain the init-only variables, since 178 | their values are not available after initialization. This also means that such 179 | dataclasses cannot later be instantiated from a dict or JSON string, since the 180 | init-only variables are a required parameter in the dataclass' `__init__` 181 | method.
`TypedJsonMixin` detects the usage of dataclasses with init-only 182 | variables, emits a warning when it is converted to a dict or JSON string, and 183 | refuses to instantiate a dataclass with init-only variables. 184 | 185 | A first workaround consists of providing a default value to the init-only 186 | variables: 187 | 188 | ```python 189 | @dataclass 190 | class Person(TypedJsonMixin): 191 | person_name: InitVar[str] = '' 192 | person_first_name: str = '' 193 | person_last_name: str = '' 194 | 195 | def __post_init__(self, person_name): 196 | if person_name: 197 | # Instantiated directly 198 | self.person_first_name, self.person_last_name = person_name.split() 199 | # Call TypedJsonMixin __post_init__ method 200 | super().__post_init__() 201 | ``` 202 | **Note**: Instantiations without arguments, such as `Person()`, are now 203 | possible, although the created instance would then be invalid. 204 | 205 | The second workaround is to remove init-only variables from the dataclass, and 206 | perform the `__post_init__` instantiation using a class method instead: 207 | 208 | ```python 209 | @dataclass 210 | class Person(TypedJsonMixin): 211 | person_first_name: str 212 | person_last_name: str 213 | 214 | @classmethod 215 | def create(cls, person_name): 216 | first_name, last_name = person_name.split() 217 | return cls(first_name, last_name) 218 | ``` 219 | 220 | Finally, if the dataclass is not meant to ever be instantiated from a dict or 221 | JSON string, and only the `to_dict` or `to_json` methods are called, the 222 | warnings can be suppressed by passing `warn_on_initvar=False` as a keyword 223 | argument in the method call. 224 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "atomicwrites" 3 | version = "1.3.0" 4 | description = "Atomic file writes."
5 | category = "dev" 6 | optional = false 7 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 8 | 9 | [[package]] 10 | name = "attrs" 11 | version = "19.3.0" 12 | description = "Classes Without Boilerplate" 13 | category = "dev" 14 | optional = false 15 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 16 | 17 | [package.extras] 18 | azure-pipelines = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "pytest-azurepipelines"] 19 | dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "pre-commit"] 20 | docs = ["sphinx", "zope.interface"] 21 | tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] 22 | 23 | [[package]] 24 | name = "certifi" 25 | version = "2019.3.9" 26 | description = "Python package for providing Mozilla's CA Bundle." 27 | category = "dev" 28 | optional = false 29 | python-versions = "*" 30 | 31 | [[package]] 32 | name = "chardet" 33 | version = "3.0.4" 34 | description = "Universal encoding detector for Python 2 and 3" 35 | category = "dev" 36 | optional = false 37 | python-versions = "*" 38 | 39 | [[package]] 40 | name = "codecov" 41 | version = "2.1.11" 42 | description = "Hosted coverage reports for GitHub, Bitbucket and Gitlab" 43 | category = "dev" 44 | optional = false 45 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 46 | 47 | [package.dependencies] 48 | coverage = "*" 49 | requests = ">=2.7.9" 50 | 51 | [[package]] 52 | name = "colorama" 53 | version = "0.4.1" 54 | description = "Cross-platform colored terminal text." 
55 | category = "dev" 56 | optional = false 57 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 58 | 59 | [[package]] 60 | name = "coverage" 61 | version = "5.5" 62 | description = "Code coverage measurement for Python" 63 | category = "dev" 64 | optional = false 65 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 66 | 67 | [package.extras] 68 | toml = ["toml"] 69 | 70 | [[package]] 71 | name = "flake8" 72 | version = "3.9.0" 73 | description = "the modular source code checker: pep8 pyflakes and co" 74 | category = "main" 75 | optional = false 76 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 77 | 78 | [package.dependencies] 79 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 80 | mccabe = ">=0.6.0,<0.7.0" 81 | pycodestyle = ">=2.7.0,<2.8.0" 82 | pyflakes = ">=2.3.0,<2.4.0" 83 | 84 | [[package]] 85 | name = "flake8-alfred" 86 | version = "1.1.1" 87 | description = "Flake8 plugin warning for unsafe functions" 88 | category = "dev" 89 | optional = false 90 | python-versions = ">=3.6" 91 | 92 | [package.dependencies] 93 | flake8 = "*" 94 | 95 | [[package]] 96 | name = "flake8-blind-except" 97 | version = "0.2.0" 98 | description = "A flake8 extension that checks for blind except: statements" 99 | category = "dev" 100 | optional = false 101 | python-versions = "*" 102 | 103 | [[package]] 104 | name = "flake8-bugbear" 105 | version = "21.4.3" 106 | description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
107 | category = "dev" 108 | optional = false 109 | python-versions = ">=3.6" 110 | 111 | [package.dependencies] 112 | attrs = ">=19.2.0" 113 | flake8 = ">=3.0.0" 114 | 115 | [package.extras] 116 | dev = ["coverage", "black", "hypothesis", "hypothesmith"] 117 | 118 | [[package]] 119 | name = "flake8-builtins" 120 | version = "1.5.3" 121 | description = "Check for python builtins being used as variables or parameters." 122 | category = "dev" 123 | optional = false 124 | python-versions = "*" 125 | 126 | [package.dependencies] 127 | flake8 = "*" 128 | 129 | [package.extras] 130 | test = ["coverage", "coveralls", "mock", "pytest", "pytest-cov"] 131 | 132 | [[package]] 133 | name = "flake8-class-newline" 134 | version = "1.6.0" 135 | description = "Flake8 lint for newline after class definitions." 136 | category = "dev" 137 | optional = false 138 | python-versions = "*" 139 | 140 | [package.dependencies] 141 | flake8 = "*" 142 | 143 | [[package]] 144 | name = "flake8-comprehensions" 145 | version = "3.4.0" 146 | description = "A flake8 plugin to help you write better list/set/dict comprehensions." 147 | category = "dev" 148 | optional = false 149 | python-versions = ">=3.6" 150 | 151 | [package.dependencies] 152 | flake8 = ">=3.0,<3.2.0 || >3.2.0,<4" 153 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 154 | 155 | [[package]] 156 | name = "flake8-deprecated" 157 | version = "1.3" 158 | description = "Warns about deprecated method calls." 159 | category = "dev" 160 | optional = false 161 | python-versions = "*" 162 | 163 | [package.dependencies] 164 | flake8 = ">=3.0.0" 165 | 166 | [[package]] 167 | name = "flake8-import-order" 168 | version = "0.18.1" 169 | description = "Flake8 and pylama plugin that checks the ordering of import statements." 
170 | category = "dev" 171 | optional = false 172 | python-versions = "*" 173 | 174 | [package.dependencies] 175 | pycodestyle = "*" 176 | 177 | [[package]] 178 | name = "flake8-mutable" 179 | version = "1.2.0" 180 | description = "mutable defaults flake8 extension" 181 | category = "dev" 182 | optional = false 183 | python-versions = "*" 184 | 185 | [package.dependencies] 186 | flake8 = "*" 187 | 188 | [[package]] 189 | name = "flake8-polyfill" 190 | version = "1.0.2" 191 | description = "Polyfill package for Flake8 plugins" 192 | category = "dev" 193 | optional = false 194 | python-versions = "*" 195 | 196 | [package.dependencies] 197 | flake8 = "*" 198 | 199 | [[package]] 200 | name = "flake8-quotes" 201 | version = "3.2.0" 202 | description = "Flake8 lint for quotes." 203 | category = "dev" 204 | optional = false 205 | python-versions = "*" 206 | 207 | [package.dependencies] 208 | flake8 = "*" 209 | 210 | [[package]] 211 | name = "flake8-tuple" 212 | version = "0.4.1" 213 | description = "Check code for 1 element tuple." 
214 | category = "main" 215 | optional = false 216 | python-versions = "*" 217 | 218 | [package.dependencies] 219 | flake8 = "*" 220 | six = "*" 221 | 222 | [[package]] 223 | name = "idna" 224 | version = "2.8" 225 | description = "Internationalized Domain Names in Applications (IDNA)" 226 | category = "dev" 227 | optional = false 228 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 229 | 230 | [[package]] 231 | name = "importlib-metadata" 232 | version = "0.17" 233 | description = "Read metadata from Python packages" 234 | category = "main" 235 | optional = false 236 | python-versions = ">=2.7,!=3.0,!=3.1,!=3.2,!=3.3" 237 | 238 | [package.dependencies] 239 | zipp = ">=0.5" 240 | 241 | [package.extras] 242 | docs = ["sphinx", "docutils (==0.12)", "rst.linker"] 243 | 244 | [[package]] 245 | name = "iniconfig" 246 | version = "1.0.1" 247 | description = "iniconfig: brain-dead simple config-ini parsing" 248 | category = "dev" 249 | optional = false 250 | python-versions = "*" 251 | 252 | [[package]] 253 | name = "mccabe" 254 | version = "0.6.1" 255 | description = "McCabe checker, plugin for flake8" 256 | category = "main" 257 | optional = false 258 | python-versions = "*" 259 | 260 | [[package]] 261 | name = "packaging" 262 | version = "19.0" 263 | description = "Core utilities for Python packages" 264 | category = "dev" 265 | optional = false 266 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 267 | 268 | [package.dependencies] 269 | pyparsing = ">=2.0.2" 270 | six = "*" 271 | 272 | [[package]] 273 | name = "pep8-naming" 274 | version = "0.11.1" 275 | description = "Check PEP-8 naming conventions, plugin for flake8" 276 | category = "dev" 277 | optional = false 278 | python-versions = "*" 279 | 280 | [package.dependencies] 281 | flake8-polyfill = ">=1.0.2,<2" 282 | 283 | [[package]] 284 | name = "pluggy" 285 | version = "0.12.0" 286 | description = "plugin and hook calling mechanisms for python" 287 | category = "dev" 288 | optional = 
false 289 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 290 | 291 | [package.dependencies] 292 | importlib-metadata = ">=0.12" 293 | 294 | [package.extras] 295 | dev = ["pre-commit", "tox"] 296 | 297 | [[package]] 298 | name = "py" 299 | version = "1.9.0" 300 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 301 | category = "dev" 302 | optional = false 303 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 304 | 305 | [[package]] 306 | name = "pycodestyle" 307 | version = "2.7.0" 308 | description = "Python style guide checker" 309 | category = "main" 310 | optional = false 311 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 312 | 313 | [[package]] 314 | name = "pyflakes" 315 | version = "2.3.0" 316 | description = "passive checker of Python programs" 317 | category = "main" 318 | optional = false 319 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 320 | 321 | [[package]] 322 | name = "pyparsing" 323 | version = "2.4.0" 324 | description = "Python parsing module" 325 | category = "dev" 326 | optional = false 327 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 328 | 329 | [[package]] 330 | name = "pytest" 331 | version = "6.2.3" 332 | description = "pytest: simple powerful testing with Python" 333 | category = "dev" 334 | optional = false 335 | python-versions = ">=3.6" 336 | 337 | [package.dependencies] 338 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} 339 | attrs = ">=19.2.0" 340 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 341 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 342 | iniconfig = "*" 343 | packaging = "*" 344 | pluggy = ">=0.12,<1.0.0a1" 345 | py = ">=1.8.2" 346 | toml = "*" 347 | 348 | [package.extras] 349 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] 350 | 351 | [[package]] 352 | name = "pytest-cov" 353 | 
version = "2.11.1" 354 | description = "Pytest plugin for measuring coverage." 355 | category = "dev" 356 | optional = false 357 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 358 | 359 | [package.dependencies] 360 | coverage = ">=5.2.1" 361 | pytest = ">=4.6" 362 | 363 | [package.extras] 364 | testing = ["fields", "hunter", "process-tests (==2.0.2)", "six", "pytest-xdist", "virtualenv"] 365 | 366 | [[package]] 367 | name = "requests" 368 | version = "2.22.0" 369 | description = "Python HTTP for Humans." 370 | category = "dev" 371 | optional = false 372 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 373 | 374 | [package.dependencies] 375 | certifi = ">=2017.4.17" 376 | chardet = ">=3.0.2,<3.1.0" 377 | idna = ">=2.5,<2.9" 378 | urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26" 379 | 380 | [package.extras] 381 | security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)"] 382 | socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] 383 | 384 | [[package]] 385 | name = "six" 386 | version = "1.12.0" 387 | description = "Python 2 and 3 compatibility utilities" 388 | category = "main" 389 | optional = false 390 | python-versions = ">=2.6, !=3.0.*, !=3.1.*" 391 | 392 | [[package]] 393 | name = "toml" 394 | version = "0.10.1" 395 | description = "Python Library for Tom's Obvious, Minimal Language" 396 | category = "dev" 397 | optional = false 398 | python-versions = "*" 399 | 400 | [[package]] 401 | name = "urllib3" 402 | version = "1.25.3" 403 | description = "HTTP library with thread-safe connection pooling, file post, and more." 
404 | category = "dev" 405 | optional = false 406 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" 407 | 408 | [package.extras] 409 | brotli = ["brotlipy (>=0.6.0)"] 410 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] 411 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 412 | 413 | [[package]] 414 | name = "zipp" 415 | version = "0.5.1" 416 | description = "Backport of pathlib-compatible object wrapper for zip files" 417 | category = "main" 418 | optional = false 419 | python-versions = ">=2.7" 420 | 421 | [package.extras] 422 | docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] 423 | testing = ["pathlib2", "contextlib2", "unittest2"] 424 | 425 | [metadata] 426 | lock-version = "1.1" 427 | python-versions = "^3.7" 428 | content-hash = "4b7ca641e14f1eab4776d7b7c5ca13408f2102a855fa1c254c78a765abb0eec8" 429 | 430 | [metadata.files] 431 | atomicwrites = [ 432 | {file = "atomicwrites-1.3.0-py2.py3-none-any.whl", hash = "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4"}, 433 | {file = "atomicwrites-1.3.0.tar.gz", hash = "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"}, 434 | ] 435 | attrs = [ 436 | {file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"}, 437 | {file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"}, 438 | ] 439 | certifi = [ 440 | {file = "certifi-2019.3.9-py2.py3-none-any.whl", hash = "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5"}, 441 | {file = "certifi-2019.3.9.tar.gz", hash = "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae"}, 442 | ] 443 | chardet = [ 444 | {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, 445 | {file = "chardet-3.0.4.tar.gz", hash = 
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, 446 | ] 447 | codecov = [ 448 | {file = "codecov-2.1.11-py2.py3-none-any.whl", hash = "sha256:ba8553a82942ce37d4da92b70ffd6d54cf635fc1793ab0a7dc3fecd6ebfb3df8"}, 449 | {file = "codecov-2.1.11-py3.8.egg", hash = "sha256:e95901d4350e99fc39c8353efa450050d2446c55bac91d90fcfd2354e19a6aef"}, 450 | {file = "codecov-2.1.11.tar.gz", hash = "sha256:6cde272454009d27355f9434f4e49f238c0273b216beda8472a65dc4957f473b"}, 451 | ] 452 | colorama = [ 453 | {file = "colorama-0.4.1-py2.py3-none-any.whl", hash = "sha256:f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48"}, 454 | {file = "colorama-0.4.1.tar.gz", hash = "sha256:05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d"}, 455 | ] 456 | coverage = [ 457 | {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, 458 | {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, 459 | {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, 460 | {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, 461 | {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, 462 | {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, 463 | {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, 464 | {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, 465 | {file = 
"coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, 466 | {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, 467 | {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, 468 | {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, 469 | {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, 470 | {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, 471 | {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, 472 | {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, 473 | {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, 474 | {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, 475 | {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, 476 | {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, 477 | {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, 478 | {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, 479 | {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, 480 | {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, 481 | {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, 482 | {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, 483 | {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, 484 | {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, 485 | {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, 486 | {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, 487 | {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, 488 | {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, 489 | {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, 490 | {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, 491 | {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, 492 | {file = 
"coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, 493 | {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, 494 | {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, 495 | {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, 496 | {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, 497 | {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, 498 | {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, 499 | {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, 500 | {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, 501 | {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, 502 | {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, 503 | {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, 504 | {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, 505 | {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, 506 | {file = 
"coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, 507 | {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, 508 | {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, 509 | ] 510 | flake8 = [ 511 | {file = "flake8-3.9.0-py2.py3-none-any.whl", hash = "sha256:12d05ab02614b6aee8df7c36b97d1a3b2372761222b19b58621355e82acddcff"}, 512 | {file = "flake8-3.9.0.tar.gz", hash = "sha256:78873e372b12b093da7b5e5ed302e8ad9e988b38b063b61ad937f26ca58fc5f0"}, 513 | ] 514 | flake8-alfred = [ 515 | {file = "flake8-alfred-1.1.1.tar.gz", hash = "sha256:c68b40165b00d3c9f0da6e64f08e95da919fc358dc8b8596374f54fa63cd6be5"}, 516 | {file = "flake8_alfred-1.1.1-py3-none-any.whl", hash = "sha256:3cce4a91fe07b50b5efcb650f5f13901052b64c3850f9cf10cce330efab6b34d"}, 517 | ] 518 | flake8-blind-except = [ 519 | {file = "flake8-blind-except-0.2.0.tar.gz", hash = "sha256:02a860a1a19cb602c006a3fe0778035b0d14d3f57929b4b798bc7d6684f204e5"}, 520 | ] 521 | flake8-bugbear = [ 522 | {file = "flake8-bugbear-21.4.3.tar.gz", hash = "sha256:2346c81f889955b39e4a368eb7d508de723d9de05716c287dc860a4073dc57e7"}, 523 | {file = "flake8_bugbear-21.4.3-py36.py37.py38-none-any.whl", hash = "sha256:4f305dca96be62bf732a218fe6f1825472a621d3452c5b994d8f89dae21dbafa"}, 524 | ] 525 | flake8-builtins = [ 526 | {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"}, 527 | {file = "flake8_builtins-1.5.3-py2.py3-none-any.whl", hash = "sha256:7706babee43879320376861897e5d1468e396a40b8918ed7bccf70e5f90b8687"}, 528 | ] 529 | flake8-class-newline = [ 530 | {file = "flake8-class-newline-1.6.0.tar.gz", hash = "sha256:514c4923c88eb8b3dd52db4b55b8d3483520db89db80af6ba812a4af15421ff1"}, 531 | {file = "flake8_class_newline-1.6.0-py2-none-any.whl", hash = 
"sha256:44e77747396c7586bbfe7814df9caf17d92000cb70a5ef9df5d721c934c6db33"}, 532 | {file = "flake8_class_newline-1.6.0-py3-none-any.whl", hash = "sha256:130a6991e8c58ec9a6380d273e449e61bcd974b446c97f1e16493f6350fa8d35"}, 533 | ] 534 | flake8-comprehensions = [ 535 | {file = "flake8-comprehensions-3.4.0.tar.gz", hash = "sha256:c00039be9f3959a26a98da3024f0fe809859bf1753ccb90e228cc40f3ac31ca7"}, 536 | {file = "flake8_comprehensions-3.4.0-py3-none-any.whl", hash = "sha256:7258a28e229fb9a8d16370b9c47a7d66396ba0201abb06c9d11df41b18ed64c4"}, 537 | ] 538 | flake8-deprecated = [ 539 | {file = "flake8-deprecated-1.3.tar.gz", hash = "sha256:9fa5a0c5c81fb3b34c53a0e4f16cd3f0a3395078cfd4988011cbab5fb0afa7f7"}, 540 | {file = "flake8_deprecated-1.3-py2.py3-none-any.whl", hash = "sha256:211951854837ced9ec997a75c6e5b957f3536a735538ee0620b76539fd3706cd"}, 541 | ] 542 | flake8-import-order = [ 543 | {file = "flake8-import-order-0.18.1.tar.gz", hash = "sha256:a28dc39545ea4606c1ac3c24e9d05c849c6e5444a50fb7e9cdd430fc94de6e92"}, 544 | {file = "flake8_import_order-0.18.1-py2.py3-none-any.whl", hash = "sha256:90a80e46886259b9c396b578d75c749801a41ee969a235e163cfe1be7afd2543"}, 545 | ] 546 | flake8-mutable = [ 547 | {file = "flake8-mutable-1.2.0.tar.gz", hash = "sha256:ee9b77111b867d845177bbc289d87d541445ffcc6029a0c5c65865b42b18c6a6"}, 548 | {file = "flake8_mutable-1.2.0-py2-none-any.whl", hash = "sha256:38fd9dadcbcda6550a916197bc40ed76908119dabb37fbcca30873666c31d2d5"}, 549 | ] 550 | flake8-polyfill = [ 551 | {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, 552 | {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"}, 553 | ] 554 | flake8-quotes = [ 555 | {file = "flake8-quotes-3.2.0.tar.gz", hash = "sha256:3f1116e985ef437c130431ac92f9b3155f8f652fda7405ac22ffdfd7a9d1055e"}, 556 | ] 557 | flake8-tuple = [ 558 | {file = 
"flake8_tuple-0.4.1-py2.py3-none-any.whl", hash = "sha256:d828cc8e461c50cacca116e9abb0c9e3be565e8451d3f5c00578c63670aae680"}, 559 | {file = "flake8_tuple-0.4.1.tar.gz", hash = "sha256:8a1b42aab134ef4c3fef13c6a8f383363f158b19fbc165bd91aed9c51851a61d"}, 560 | ] 561 | idna = [ 562 | {file = "idna-2.8-py2.py3-none-any.whl", hash = "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"}, 563 | {file = "idna-2.8.tar.gz", hash = "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"}, 564 | ] 565 | importlib-metadata = [ 566 | {file = "importlib_metadata-0.17-py2.py3-none-any.whl", hash = "sha256:df1403cd3aebeb2b1dcd3515ca062eecb5bd3ea7611f18cba81130c68707e879"}, 567 | {file = "importlib_metadata-0.17.tar.gz", hash = "sha256:a9f185022cfa69e9ca5f7eabfd5a58b689894cb78a11e3c8c89398a8ccbb8e7f"}, 568 | ] 569 | iniconfig = [ 570 | {file = "iniconfig-1.0.1-py3-none-any.whl", hash = "sha256:80cf40c597eb564e86346103f609d74efce0f6b4d4f30ec8ce9e2c26411ba437"}, 571 | {file = "iniconfig-1.0.1.tar.gz", hash = "sha256:e5f92f89355a67de0595932a6c6c02ab4afddc6fcdc0bfc5becd0d60884d3f69"}, 572 | ] 573 | mccabe = [ 574 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 575 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 576 | ] 577 | packaging = [ 578 | {file = "packaging-19.0-py2.py3-none-any.whl", hash = "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"}, 579 | {file = "packaging-19.0.tar.gz", hash = "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af"}, 580 | ] 581 | pep8-naming = [ 582 | {file = "pep8-naming-0.11.1.tar.gz", hash = "sha256:a1dd47dd243adfe8a83616e27cf03164960b507530f155db94e10b36a6cd6724"}, 583 | {file = "pep8_naming-0.11.1-py2.py3-none-any.whl", hash = "sha256:f43bfe3eea7e0d73e8b5d07d6407ab47f2476ccaeff6937c84275cd30b016738"}, 584 | ] 585 | pluggy 
= [ 586 | {file = "pluggy-0.12.0-py2.py3-none-any.whl", hash = "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"}, 587 | {file = "pluggy-0.12.0.tar.gz", hash = "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc"}, 588 | ] 589 | py = [ 590 | {file = "py-1.9.0-py2.py3-none-any.whl", hash = "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2"}, 591 | {file = "py-1.9.0.tar.gz", hash = "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"}, 592 | ] 593 | pycodestyle = [ 594 | {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, 595 | {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, 596 | ] 597 | pyflakes = [ 598 | {file = "pyflakes-2.3.0-py2.py3-none-any.whl", hash = "sha256:910208209dcea632721cb58363d0f72913d9e8cf64dc6f8ae2e02a3609aba40d"}, 599 | {file = "pyflakes-2.3.0.tar.gz", hash = "sha256:e59fd8e750e588358f1b8885e5a4751203a0516e0ee6d34811089ac294c8806f"}, 600 | ] 601 | pyparsing = [ 602 | {file = "pyparsing-2.4.0-py2.py3-none-any.whl", hash = "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03"}, 603 | {file = "pyparsing-2.4.0.tar.gz", hash = "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a"}, 604 | ] 605 | pytest = [ 606 | {file = "pytest-6.2.3-py3-none-any.whl", hash = "sha256:6ad9c7bdf517a808242b998ac20063c41532a570d088d77eec1ee12b0b5574bc"}, 607 | {file = "pytest-6.2.3.tar.gz", hash = "sha256:671238a46e4df0f3498d1c3270e5deb9b32d25134c99b7d75370a68cfbe9b634"}, 608 | ] 609 | pytest-cov = [ 610 | {file = "pytest-cov-2.11.1.tar.gz", hash = "sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7"}, 611 | {file = "pytest_cov-2.11.1-py2.py3-none-any.whl", hash = "sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da"}, 612 | ] 613 | requests = [ 614 | 
{file = "requests-2.22.0-py2.py3-none-any.whl", hash = "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"}, 615 | {file = "requests-2.22.0.tar.gz", hash = "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4"}, 616 | ] 617 | six = [ 618 | {file = "six-1.12.0-py2.py3-none-any.whl", hash = "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c"}, 619 | {file = "six-1.12.0.tar.gz", hash = "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"}, 620 | ] 621 | toml = [ 622 | {file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"}, 623 | {file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"}, 624 | ] 625 | urllib3 = [ 626 | {file = "urllib3-1.25.3-py2.py3-none-any.whl", hash = "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1"}, 627 | {file = "urllib3-1.25.3.tar.gz", hash = "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"}, 628 | ] 629 | zipp = [ 630 | {file = "zipp-0.5.1-py2.py3-none-any.whl", hash = "sha256:8c1019c6aad13642199fbe458275ad6a84907634cc9f0989877ccc4a2840139d"}, 631 | {file = "zipp-0.5.1.tar.gz", hash = "sha256:ca943a7e809cc12257001ccfb99e3563da9af99d52f261725e96dfe0f9275bc3"}, 632 | ] 633 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ "poetry>=0.12",] 3 | build-backend = "poetry.masonry.api" 4 | 5 | [tool.poetry] 6 | name = "typed-json-dataclass" 7 | version = "1.2.1" 8 | description = "Make your dataclasses automatically validate their types" 9 | authors = [ "Aaron ",] 10 | license = "MIT" 11 | readme = "README.md" 12 | homepage = "http://github.com/abatilo/typed-json-dataclass" 13 | repository = 
"http://github.com/abatilo/typed-json-dataclass" 14 | keywords = [ "dataclasses", "dataclass", "json", "mypy", "pyre", "marshmallow", "attrs", "cattrs",] 15 | classifiers = [ "Intended Audience :: Developers", "Development Status :: 5 - Production/Stable", "Topic :: Software Development :: Libraries :: Python Modules", "Environment :: Web Environment", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3.7",] 16 | 17 | [tool.poetry.dependencies] 18 | python = "^3.7" 19 | flake8-tuple = "^0.4.0" 20 | 21 | [tool.poetry.dev-dependencies] 22 | flake8 = "^3.9" 23 | pytest = "^6.2" 24 | coverage = "^5.5" 25 | flake8-quotes = "^3.2" 26 | flake8-deprecated = "^1.3" 27 | flake8-mutable = "^1.2" 28 | flake8-builtins = "^1.5" 29 | flake8-bugbear = "^21.4" 30 | flake8-blind-except = "^0.2.0" 31 | flake8-class-newline = "^1.6" 32 | flake8-import-order = "^0.18.1" 33 | flake8-alfred = "^1.1" 34 | pep8-naming = "^0.11.1" 35 | flake8-comprehensions = "^3.4" 36 | pytest-cov = "^2.11" 37 | codecov = "^2.1" 38 | -------------------------------------------------------------------------------- /tests/test_issue_8.py: -------------------------------------------------------------------------------- 1 | """Tests for https://github.com/abatilo/typed-json-dataclass/issues/8""" 2 | 3 | from dataclasses import InitVar, dataclass 4 | 5 | import pytest 6 | from typed_json_dataclass import TypedJsonMixin 7 | 8 | 9 | @dataclass 10 | class DataclassWithInitVar(TypedJsonMixin): 11 | init: InitVar[str] 12 | a: int = 0 13 | b: str = '' 14 | 15 | def __post_init__(self, init: str) -> None: 16 | self.a = len(init) 17 | self.b = init[0] 18 | super().__post_init__() 19 | 20 | 21 | @dataclass 22 | class DataclassWithDefaultInitVar(TypedJsonMixin): 23 | init: InitVar[str] = None 24 | a: int = 0 25 | b: str = '' 26 | 27 | def __post_init__(self, init: str) -> None: 28 | if init is None: 29 | # from_dict 30 | return 31 | self.a = len(init) 32 | self.b = init[0] 33 | 34 | 35 | def 
test_that_instantiation_of_dataclass_with_init_var_typechecks() -> None: 36 | result = DataclassWithInitVar('foo') 37 | 38 | assert result.a == 3 39 | assert result.b == 'f' 40 | 41 | 42 | def test_that_dataclass_with_init_var_to_dict_leads_to_warning() -> None: 43 | dcls = DataclassWithInitVar('foo') 44 | 45 | with pytest.warns(UserWarning, match='init-only variables'): 46 | result = dcls.to_dict() 47 | 48 | assert result == {'a': 3, 'b': 'f'} 49 | 50 | 51 | def test_that_dataclass_with_init_var_to_json_leads_to_warning() -> None: 52 | dcls = DataclassWithInitVar('foo') 53 | 54 | with pytest.warns(UserWarning, match='init-only variables'): 55 | result = dcls.to_json() 56 | 57 | assert result == '{"a": 3, "b": "f"}' 58 | 59 | 60 | def test_init_var_dc_to_dict_no_warnings_when_acknowledged() -> None: 61 | dcls = DataclassWithInitVar('foo') 62 | 63 | with pytest.warns(None) as recorded_warnings: 64 | result = dcls.to_dict(warn_on_initvar=False) 65 | 66 | assert not recorded_warnings 67 | assert result == {'a': 3, 'b': 'f'} 68 | 69 | 70 | def test_init_var_dc_to_json_no_warnings_when_acknowledged() -> None: 71 | dcls = DataclassWithInitVar('foo') 72 | 73 | with pytest.warns(None) as recorded_warnings: 74 | result = dcls.to_json(warn_on_initvar=False) 75 | 76 | assert not recorded_warnings 77 | assert result == '{"a": 3, "b": "f"}' 78 | 79 | 80 | def test_init_var_dc_to_dict_no_warning_when_default_value_provided() -> None: 81 | dcls = DataclassWithDefaultInitVar('foo') 82 | 83 | with pytest.warns(None) as recorded_warnings: 84 | result = dcls.to_dict() 85 | 86 | assert not recorded_warnings 87 | assert result == {'a': 3, 'b': 'f'} 88 | 89 | 90 | def test_init_var_dc_to_json_no_warning_when_default_value_provided() -> None: 91 | dcls = DataclassWithDefaultInitVar('foo') 92 | 93 | with pytest.warns(None) as recorded_warnings: 94 | result = dcls.to_json() 95 | 96 | assert not recorded_warnings 97 | assert result == '{"a": 3, "b": "f"}' 98 | 99 | 100 | def 
test_that_dataclass_with_init_var_from_dict_leads_to_typeerror() -> None: 101 | raw_dict = {'a': 3, 'b': 'f'} 102 | 103 | with pytest.raises(TypeError, match='init-only variables'): 104 | DataclassWithInitVar.from_dict(raw_dict) 105 | 106 | 107 | def test_that_dataclass_with_init_var_from_json_leads_to_typeerror() -> None: 108 | raw_json = '{"a": 3, "b": "f"}' 109 | 110 | with pytest.raises(TypeError, match='init-only variables'): 111 | DataclassWithInitVar.from_json(raw_json) 112 | 113 | 114 | def test_init_var_dc_from_dict_no_error_when_default_value_provided() -> None: 115 | raw_dict = {'a': 3, 'b': 'f'} 116 | 117 | result = DataclassWithDefaultInitVar.from_dict(raw_dict) 118 | 119 | assert result.a == 3 120 | assert result.b == 'f' 121 | 122 | 123 | def test_init_var_dc_from_json_no_error_when_default_value_provided() -> None: 124 | raw_json = '{"a": 3, "b": "f"}' 125 | 126 | result = DataclassWithDefaultInitVar.from_json(raw_json) 127 | 128 | assert result.a == 3 129 | assert result.b == 'f' 130 | 131 | 132 | @dataclass 133 | class DataclassWithNestedInitVar(TypedJsonMixin): 134 | child_dc: DataclassWithInitVar 135 | 136 | 137 | @dataclass 138 | class DataclassWithVeryNestedInitVar(TypedJsonMixin): 139 | child_dc: DataclassWithNestedInitVar 140 | 141 | 142 | @dataclass 143 | class DataclassWithoutInitVar(TypedJsonMixin): 144 | a: int 145 | b: str 146 | 147 | 148 | @pytest.mark.parametrize('cls', [ 149 | DataclassWithInitVar, 150 | DataclassWithNestedInitVar, 151 | DataclassWithVeryNestedInitVar]) 152 | def test_dc_with_init_var_in_child_should_contain_init_var(cls) -> None: 153 | assert cls._contains_non_default_init_vars() 154 | 155 | 156 | def test_dc_without_init_var_should_not_contain_init_var() -> None: 157 | assert not DataclassWithoutInitVar._contains_non_default_init_vars() 158 | -------------------------------------------------------------------------------- /tests/test_mapping_configurations.py: 
-------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | import pytest 4 | from typed_json_dataclass import TypedJsonMixin, MappingMode 5 | 6 | 7 | @dataclass 8 | class SnakeCaseObjects(TypedJsonMixin): 9 | object_id: str 10 | 11 | 12 | @dataclass 13 | class CamelCaseObjects(TypedJsonMixin): 14 | objectId: str 15 | 16 | 17 | @dataclass 18 | class ChildObject(TypedJsonMixin): 19 | objectId: str 20 | 21 | 22 | @dataclass 23 | class ParentObject(TypedJsonMixin): 24 | objectId: str 25 | child: ChildObject 26 | 27 | 28 | def test_mapping_from_dict_to_snake_case(): 29 | camel_case_object = { 30 | 'objectId': 'asdf' 31 | } 32 | 33 | expected_object = SnakeCaseObjects('asdf') 34 | assert SnakeCaseObjects.from_dict( 35 | camel_case_object, 36 | mapping_mode=MappingMode.SnakeCase) == expected_object 37 | 38 | 39 | def test_mapping_from_dict_to_camel_case(): 40 | 41 | snake_case_json = { 42 | 'object_id': 'asdf' 43 | } 44 | 45 | expected_object = CamelCaseObjects('asdf') 46 | assert CamelCaseObjects.from_dict( 47 | snake_case_json, 48 | mapping_mode=MappingMode.CamelCase) == expected_object 49 | 50 | 51 | def test_mapping_from_json_to_snake_case(): 52 | camel_case_json = """ 53 | { 54 | "objectId": "asdf" 55 | } 56 | """ 57 | 58 | expected_object = SnakeCaseObjects('asdf') 59 | assert SnakeCaseObjects.from_json( 60 | camel_case_json, 61 | mapping_mode=MappingMode.SnakeCase) == expected_object 62 | 63 | 64 | def test_mapping_from_json_to_camel_case(): 65 | snake_case_json = """ 66 | { 67 | "object_id": "asdf" 68 | } 69 | """ 70 | 71 | expected_object = CamelCaseObjects('asdf') 72 | assert CamelCaseObjects.from_json( 73 | snake_case_json, 74 | mapping_mode=MappingMode.CamelCase) == expected_object 75 | 76 | 77 | def test_mapping_to_camel_case_dict_from_snake_case(): 78 | expected = { 79 | 'object_id': 'asdf' 80 | } 81 | 82 | target = CamelCaseObjects('asdf') 83 | actual = 
target.to_dict(mapping_mode=MappingMode.SnakeCase) 84 | assert expected == actual 85 | 86 | 87 | def test_mapping_to_snake_case_dict_from_camel_case(): 88 | 89 | expected = { 90 | 'objectId': 'asdf' 91 | } 92 | 93 | target = SnakeCaseObjects('asdf') 94 | actual = target.to_dict(mapping_mode=MappingMode.CamelCase) 95 | assert expected == actual 96 | 97 | 98 | def test_from_json_with_invalid_mapping_mode(): 99 | with pytest.raises(ValueError) as e_info: 100 | SnakeCaseObjects.from_json('{"object_id": ""}', mapping_mode='Invalid') 101 | assert str(e_info.value) == 'Invalid mapping mode' 102 | 103 | 104 | def test_to_json_with_invalid_mapping_mode(): 105 | with pytest.raises(ValueError) as e_info: 106 | SnakeCaseObjects('asdf').to_json(mapping_mode='Invalid') 107 | assert str(e_info.value) == 'Invalid mapping mode' 108 | 109 | 110 | def test_from_dict_with_invalid_mapping_mode(): 111 | with pytest.raises(ValueError) as e_info: 112 | SnakeCaseObjects.from_dict({'object_id': ''}, mapping_mode='Invalid') 113 | assert str(e_info.value) == 'Invalid mapping mode' 114 | 115 | 116 | def test_to_dict_with_invalid_mapping_mode(): 117 | with pytest.raises(ValueError) as e_info: 118 | SnakeCaseObjects('asdf').to_dict(mapping_mode='Invalid') 119 | assert str(e_info.value) == 'Invalid mapping mode' 120 | 121 | 122 | def test_recursive_from_dict_mapping(): 123 | target = { 124 | 'object_id': 'asdf', 125 | 'child': { 126 | 'object_id': 'fdsa' 127 | } 128 | } 129 | expected = ParentObject('asdf', ChildObject('fdsa')) 130 | actual = ParentObject.from_dict(target, mapping_mode=MappingMode.CamelCase) 131 | assert expected == actual 132 | 133 | 134 | def test_recursive_to_dict_mapping(): 135 | expected = { 136 | 'object_id': 'asdf', 137 | 'child': { 138 | 'object_id': 'fdsa' 139 | } 140 | } 141 | target = ParentObject('asdf', ChildObject('fdsa')) 142 | actual = target.to_dict(mapping_mode=MappingMode.SnakeCase) 143 | assert expected == actual 144 | 145 | 146 | def 
test_recursive_from_json_mapping(): 147 | target = """ 148 | { 149 | "object_id": "asdf", 150 | "child": { 151 | "object_id": "fdsa" 152 | } 153 | } 154 | """ 155 | expected = ParentObject('asdf', ChildObject('fdsa')) 156 | actual = ParentObject.from_json(target, mapping_mode=MappingMode.CamelCase) 157 | assert expected == actual 158 | 159 | 160 | def test_recursive_to_json_mapping(): 161 | expected = """{"object_id": "asdf", "child": {"object_id": "fdsa"}}""" 162 | target = ParentObject('asdf', ChildObject('fdsa')) 163 | actual = target.to_json(mapping_mode=MappingMode.SnakeCase) 164 | assert expected == actual 165 | -------------------------------------------------------------------------------- /tests/test_optionals.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Optional, Union 3 | 4 | import pytest 5 | from typed_json_dataclass import TypedJsonMixin 6 | 7 | 8 | @dataclass 9 | class PersonWithOptionalAge(TypedJsonMixin): 10 | name: str 11 | age: Optional[int] = None 12 | 13 | 14 | def test_optionals_are_handled(): 15 | assert PersonWithOptionalAge('John', 42).to_dict() == { 16 | 'name': 'John', 17 | 'age': 42 18 | } 19 | 20 | 21 | def test_optionals_are_handled_with_empty_value(): 22 | assert PersonWithOptionalAge('John').to_dict() == { 23 | 'name': 'John', 24 | } 25 | 26 | 27 | def test_optionals_are_handled_with_empty_value_and_keeps_none(): 28 | assert PersonWithOptionalAge('John').to_dict(keep_none=True) == { 29 | 'name': 'John', 30 | 'age': None 31 | } 32 | 33 | 34 | def test_optionals_that_dont_match_raise(): 35 | with pytest.raises(TypeError) as e_info: 36 | print(PersonWithOptionalAge('John', '42').to_dict()) 37 | assert ('PersonWithOptionalAge.age was defined to be any of: (, ) but was found to be instead") == str(e_info.value) 40 | 41 | 42 | @dataclass 43 | class FirstNameLastName(TypedJsonMixin): 44 | first: str 45 | last: str 46 | 47 | 48 | @dataclass 
49 | class PersonWithUnionType(TypedJsonMixin): 50 | name: Union[str, FirstNameLastName] 51 | 52 | 53 | def test_unions_are_all_respected(): 54 | assert PersonWithUnionType('John').to_dict() == { 55 | 'name': 'John', 56 | } 57 | 58 | assert PersonWithUnionType(FirstNameLastName('John', 'Doe')).to_dict() == { 59 | 'name': { 60 | 'first': 'John', 61 | 'last': 'Doe' 62 | }, 63 | } 64 | -------------------------------------------------------------------------------- /tests/test_typed_json_dataclass.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3.7 2 | from dataclasses import dataclass 3 | from typing import List 4 | 5 | import pytest 6 | from typed_json_dataclass import TypedJsonMixin 7 | 8 | 9 | @dataclass 10 | class Label(TypedJsonMixin): 11 | name: str 12 | 13 | 14 | @dataclass 15 | class Paragraph(TypedJsonMixin): 16 | text: str 17 | labels: List[Label] = None 18 | 19 | 20 | @dataclass 21 | class Document(TypedJsonMixin): 22 | name: str 23 | paragraphs: List[Paragraph] = None 24 | tags: List[str] = None 25 | 26 | 27 | @dataclass 28 | class Author(TypedJsonMixin): 29 | name: str 30 | 31 | 32 | @dataclass 33 | class Book(TypedJsonMixin): 34 | title: str 35 | author: Author 36 | 37 | 38 | # dict based tests 39 | 40 | 41 | def test_that_valid_dict_becomes_valid_object(): 42 | raw_dict = {'name': 'msa.doc'} 43 | expected = Document('msa.doc') 44 | actual = Document.from_dict(raw_dict) 45 | assert expected == actual 46 | 47 | 48 | def test_that_invalid_dict_throws_exception(): 49 | raw_dict = {'nam': 'msa.doc'} 50 | with pytest.raises(TypeError) as e_info: 51 | Document.from_dict(raw_dict) 52 | assert ("__init__() got an unexpected keyword argument 'nam'" == 53 | str(e_info.value)) 54 | 55 | 56 | def test_that_dict_with_wrong_type_throws_exception(): 57 | raw_dict = {'name': True} 58 | with pytest.raises(TypeError) as e_info: 59 | Document.from_dict(raw_dict) 60 | assert ("Document.name is expected to be , 
but value " 61 | "True with type " 62 | 'was found instead') == str(e_info.value) 63 | 64 | 65 | def test_that_lists_with_complex_objects_are_handled_correctly(): 66 | raw_dict = {'name': 'msa.doc', 'paragraphs': [{'text': 'first paragraph', 67 | 'labels': [{'name': 'msa'}]}, 68 | {'text': 'second paragraph'}], 'tags': ['law', 'important']} 69 | expected = Document('msa.doc', [ 70 | Paragraph('first paragraph', [Label('msa')]), 71 | Paragraph('second paragraph') 72 | ], 73 | ['law', 'important']) 74 | actual = Document.from_dict(raw_dict) 75 | assert expected == actual 76 | 77 | 78 | def test_that_nested_objects_in_lists_with_invalid_keys_throws_exception(): 79 | raw_dict = {'name': 'msa.doc', 'paragraphs': [{'tex': 'first paragraph'}]} 80 | with pytest.raises(TypeError) as e_info: 81 | Document.from_dict(raw_dict) 82 | assert (("__init__() got an unexpected keyword argument 'tex'") == 83 | str(e_info.value)) 84 | 85 | 86 | def test_that_objects_with_wrong_type_in_nested_list_throws_exception(): 87 | raw_dict = {'name': 'msa.doc', 'paragraphs': [{'text': 0}]} 88 | with pytest.raises(TypeError) as e_info: 89 | Document.from_dict(raw_dict) 90 | assert ("Paragraph.text is expected to be , but value 0 " 91 | "with type " 92 | 'was found instead') == str(e_info.value) 93 | 94 | 95 | def test_that_nested_object_that_is_not_in_list_is_handled_correctly(): 96 | raw_dict = {'title': 'book', 'author': {'name': 'George'}} 97 | expected = Book('book', Author('George')) 98 | actual = Book.from_dict(raw_dict) 99 | assert expected == actual 100 | 101 | 102 | def test_that_nested_object_that_is_not_in_list_with_wrong_type_throws(): 103 | raw_dict = {'title': 'book', 'author': {'name': 0}} 104 | with pytest.raises(TypeError) as e_info: 105 | Book.from_dict(raw_dict) 106 | assert (("Book.author is expected to be , " 108 | "but value {'name': 0} is a dict with unexpected keys") == 109 | str(e_info.value)) 110 | 111 | 112 | def test_document_to_dict_without_nulls(): 113 | expected = 
def test_document_to_dict_with_null():
    expected = {'name': 'msa.doc', 'paragraphs': None, 'tags': None}
    actual = Document('msa.doc').to_dict(keep_none=True)
    assert expected == actual


def test_that_dict_with_untyped_list_throws_exception():
    # NOTE(review): the "<class ...>" fragments in the expected messages in
    # this file were stripped by markup extraction; they are restored below to
    # match the f-string messages raised by TypedJsonMixin.__post_init__.
    with pytest.raises(TypeError) as e_info:
        MissingListType([])
    assert ("MissingListType.some_list was defined as a <class 'list'>, "
            'but is missing information about the type of the elements '
            'inside it') == str(e_info.value)


# Json string based tests


def test_that_valid_json_becomes_valid_object():
    raw_json = '{"name": "msa.doc"}'
    expected = Document('msa.doc')
    actual = Document.from_json(raw_json)
    assert expected == actual


def test_that_invalid_json_throws_exception():
    raw_json = '{"nam": "msa.doc"}'
    with pytest.raises(TypeError) as e_info:
        Document.from_json(raw_json)
    assert ("__init__() got an unexpected keyword argument 'nam'" ==
            str(e_info.value))


def test_that_json_with_wrong_type_throws_exception():
    raw_json = '{"name": true}'
    with pytest.raises(TypeError) as e_info:
        Document.from_json(raw_json)
    assert ("Document.name is expected to be <class 'str'>, but value True "
            "with type <class 'bool'> "
            'was found instead') == str(e_info.value)


def test_that_lists_with_complex_json_are_handled_correctly():
    raw_json = ('{"name": "msa.doc", "paragraphs": [{"text": "first '
                'paragraph", "labels": [{"name": "msa"}]},'
                '{"text": "second paragraph"}], "tags": ["law", "important"]}')
    expected = Document('msa.doc', [
        Paragraph('first paragraph', [Label('msa')]),
        Paragraph('second paragraph')
    ],
        ['law', 'important'])
    actual = Document.from_json(raw_json)
    assert expected == actual


def test_that_nested_json_in_lists_with_invalid_keys_throws_exception():
    raw_json = ('{"name": "msa.doc", "paragraphs": '
                '[{"tex": "first paragraph"}]}')
    with pytest.raises(TypeError) as e_info:
        Document.from_json(raw_json)
    assert (("__init__() got an unexpected keyword argument 'tex'") ==
            str(e_info.value))


def test_that_json_with_wrong_type_in_nested_list_throws_exception():
    raw_json = '{"name": "msa.doc", "paragraphs": [{"text": 0}]}'
    with pytest.raises(TypeError) as e_info:
        Document.from_json(raw_json)
    assert ("Paragraph.text is expected to be <class 'str'>, but value 0 "
            "with type <class 'int'> "
            'was found instead') == str(e_info.value)


def test_that_nested_json_that_is_not_in_list_is_handled_correctly():
    raw_json = '{"title": "book", "author": {"name": "George"}}'
    expected = Book('book', Author('George'))
    actual = Book.from_json(raw_json)
    assert expected == actual


def test_that_nested_json_that_is_not_in_list_with_wrong_type_throws():
    raw_json = '{"title": "book", "author": {"name": 0}}'
    with pytest.raises(TypeError) as e_info:
        Book.from_json(raw_json)
    # NOTE(review): the qualified module name below assumes pytest imports
    # this file as module 'test_typed_json_dataclass' — confirm against CI.
    assert (("Book.author is expected to be "
             "<class 'test_typed_json_dataclass.Author'>, "
             "but value {'name': 0} is a dict with unexpected keys") ==
            str(e_info.value))


def test_document_to_json_without_nulls():
    expected = '{"name": "msa.doc"}'
    actual = Document('msa.doc').to_json()
    assert expected == actual


def test_document_to_json_with_null():
    expected = '{"name": "msa.doc", "paragraphs": null, "tags": null}'
    actual = Document('msa.doc').to_json(keep_none=True)
    assert expected == actual


# Edge case tests


@dataclass
class NestedListWithUniformTypes(TypedJsonMixin):
    name: List[List[str]]


def test_that_lists_with_uniform_elements_succeeds():
    name = {'name': [['foo', 'bar'], ['baz', 'bing']]}
    expected = NestedListWithUniformTypes([['foo', 'bar'], ['baz', 'bing']])
    actual = NestedListWithUniformTypes.from_dict(name)
    assert expected == actual


def test_that_lists_with_non_uniform_elements_throws_exception():
    name = {'name': [['str', 0]]}
    with pytest.raises(TypeError) as e_info:
        NestedListWithUniformTypes.from_dict(name)
    assert ("NestedListWithUniformTypes.name is [['str', 0]] which does not "
            'match typing.List[typing.List[str]]. '
            'Unfortunately, we are unable to infer the explicit type of '
            'NestedListWithUniformTypes.name') \
        == str(e_info.value)


@dataclass
class MissingListType(TypedJsonMixin):
    some_list: List = None


def test_that_json_with_untyped_list_throws_exception():
    with pytest.raises(TypeError) as e_info:
        MissingListType.from_json('{"some_list": []}')
    assert ("MissingListType.some_list was defined as a <class 'list'>, "
            'but is missing information about the type of the elements inside '
            'it') == str(e_info.value)


@dataclass
class UsesNativeList(TypedJsonMixin):
    some_list: list = None


def test_that_dict_with_wrong_list_throws_exception():
    with pytest.raises(TypeError) as e_info:
        UsesNativeList([])
    assert ("UsesNativeList.some_list was defined as a <class 'list'>, "
            'but you must use typing.List[type] instead') == str(e_info.value)


def test_that_json_with_wrong_list_throws_exception():
    with pytest.raises(TypeError) as e_info:
        UsesNativeList.from_json('{"some_list": []}')
    assert ("UsesNativeList.some_list was defined as a <class 'list'>, "
            'but you must use typing.List[type] instead') == str(e_info.value)


@dataclass
class UsesNestedNativeList(TypedJsonMixin):
    some_list: List[List[list]]


def test_nested_wrong():
    with pytest.raises(TypeError) as e_info:
        UsesNestedNativeList([])
    assert ('UsesNestedNativeList.some_list was detected to use a native '
            'Python collection in its type definition. '
            'We should only use typing.List[] for these') == str(e_info.value)


@dataclass
class ListOfTuples(TypedJsonMixin):
    name: List[tuple]


def test_for_tuples():
    with pytest.raises(TypeError) as e_info:
        ListOfTuples.from_dict({'name': []})
    assert ('ListOfTuples.name was detected to use a native Python collection '
            'in its type definition. '
            'We should only use typing.List[] for these') == str(e_info.value)


def test_that_if_a_list_of_natives_is_expected_but_dict_is_found_then_throw():
    with pytest.raises(TypeError) as e_info:
        ListOfTuples.from_dict({'name': {}})
    assert ('ListOfTuples.name was detected to use a native Python dict in '
            'its type definition. '
            'We should only use custom objects for these') == str(e_info.value)


@dataclass
class ListOfSet(TypedJsonMixin):
    name: List[set]


def test_for_set():
    with pytest.raises(TypeError) as e_info:
        ListOfSet.from_dict({'name': []})
    assert ('ListOfSet.name was detected to use a native Python collection in '
            'its type definition. '
            'We should only use typing.List[] for these') == str(e_info.value)


@dataclass
class ListOfDict(TypedJsonMixin):
    name: List[dict]


def test_for_dict():
    with pytest.raises(TypeError) as e_info:
        ListOfDict.from_dict({'name': []})
    assert ('ListOfDict.name was detected to use a native Python collection '
            'in its type definition. '
            'We should only use typing.List[] for these') == str(e_info.value)


@dataclass
class ListOfInt(TypedJsonMixin):
    name: List[int]


def test_that_an_empty_dict_does_not_get_cast_to_a_different_type():
    # We test dictionaries for being valid subjects, by attempting to
    # instantiate the expected type with the keys of the dictionary.
    # Unfortunately, calling `int(**{})` evaluates to 0, and similar for other
    # native types like str. Fortunately, an empty dictionary would never
    # instantiate to any subobject, unless the suboject had all default values.
    # If the subobject is trying to be used with all defaults, we're probably
    # using it incorrectly.
    # Thus, just prevent empty dictionaries
    with pytest.raises(TypeError) as e_info:
        ListOfInt([1, 2, {}])
    assert ('ListOfInt.name was found to have an empty dictionary. An empty '
            'dictionary will not '
            'properly instantiate a nested object') == str(e_info.value)


# More complex object tests

@dataclass
class Object3(TypedJsonMixin):
    name: str


@dataclass
class Object2(TypedJsonMixin):
    name: str
    subobjects: List[Object3]


@dataclass
class Object1(TypedJsonMixin):
    name: str
    subobjects: List[Object2]


def test_complex_nested_lists():
    objectThrees: List[Object3] = [Object3('o3_1'), Object3('o3_2')]
    objectTwos: List[Object2] = [Object2('o2_1', objectThrees)]
    objectOne: Object1 = Object1('o1_1', objectTwos)
    object_dict = Object1.from_dict({'name': 'o1_1', 'subobjects':
                                    [
                                        {'name': 'o2_1', 'subobjects': [
                                            {'name': 'o3_1'},
                                            {'name': 'o3_2'}
                                        ]}
                                    ]})
    assert object_dict.to_json() == objectOne.to_json()


# Simulate a LinkedList style scenario
@dataclass
class Node(TypedJsonMixin):
    next_node: 'Node' = None


def test_that_incorrect_recursive_definitions_throws():
    with pytest.raises(TypeError) as e_info:
        Node(next_node='something else')
    # NOTE(review): module path in the restored "<class ...>" fragment assumes
    # pytest imports this file as 'test_typed_json_dataclass' — confirm in CI.
    assert ("Node.next_node was defined as a "
            "<class 'test_typed_json_dataclass.Node'>, but we "
            "found a <class 'str'> instead") == str(e_info.value)


def test_that_correct_recursive_definitions_is_handled():
    root = Node(next_node=Node())
    assert root.to_dict() == {'next_node': {'next_node': None}}


# Simulate a Graph
@dataclass
class GraphNode(TypedJsonMixin):
    children: List['GraphNode']


def test_that_recursive_collection_is_handled():
    expected = {
        'children': [
            {
                'children': []
            },
            {
                'children': []
            }
        ]
    }
    assert (GraphNode(children=[GraphNode([]), GraphNode([])]).to_dict() ==
            expected)


def test_that_recursive_collection_with_non_matching_types_throws():
    with pytest.raises(TypeError) as e_info:
        GraphNode(children=[GraphNode([]), 'not a GraphNode'])
    assert ("GraphNode.children is [GraphNode(children=[]), 'not a "
            "GraphNode'] which does not match typing.List[ForwardRef"
            "('GraphNode')]. Unfortunately, we are unable to infer the "
            'explicit type of GraphNode.children') == str(e_info.value)


def test_deeply_nested_forward_references_are_handled():
    expected = {
        'children': [
            {
                'children': [
                    {
                        'children': []
                    }
                ]
            }
        ]
    }
    assert (GraphNode(
        children=[GraphNode(children=[GraphNode(children=[])])]
    ).to_dict()
        == expected)


def test_deeply_nested_forward_references__with_non_matching_types_throws():
    with pytest.raises(TypeError) as e_info:
        GraphNode(children=[GraphNode(children=[GraphNode(children=['bad'])])])
    assert ("GraphNode.children is ['bad'] which does not match "
            "typing.List[ForwardRef('GraphNode')]. Unfortunately, we are "
            'unable to infer the explicit type of GraphNode.'
            'children') == str(e_info.value)


# ---- tests/test_utils.py ----
import pytest
from typed_json_dataclass.utils import (
    to_snake as to_s,
    to_camel as to_c,
)


@pytest.mark.parametrize('target, expected', [
    ('hereIsSomethingInCamelCase', 'here_is_something_in_camel_case'),
    ('hereIsSomeThingInCamelCase', 'here_is_some_thing_in_camel_case'),
    ('_here_is_something_in_snake_case', '_here_is_something_in_snake_case'),
    ('HereIsSomethingInTitleCase', 'here_is_something_in_title_case')
])
def test_to_snake_case(target, expected):
    actual = to_s(target)
    assert expected == actual


@pytest.mark.parametrize('target, expected', [
    ('here_is_something_in_camel_case', 'hereIsSomethingInCamelCase'),
    ('_here_is_something_in_camel_case', 'hereIsSomethingInCamelCase'),
    ('___here_is_something_in_camel_case', 'hereIsSomethingInCamelCase'),
    ('___here___is_something_in_camel__case', 'hereIsSomethingInCamelCase'),
    ('HereIsSomethingInTitleCase', 'hereIsSomethingInTitleCase')
])
def test_to_camel_case(target, expected):
    actual = to_c(target)
    assert expected == actual
# ---- typed_json_dataclass/__init__.py ----
from typed_json_dataclass.typed_json_dataclass import (
    TypedJsonMixin,
    MappingMode,
)

__version__ = '0.2.2'
__all__ = [
    'TypedJsonMixin',
    'MappingMode',
]


# ---- typed_json_dataclass/typed_json_dataclass.py ----
import json
import typing
from dataclasses import InitVar, MISSING, asdict, fields, is_dataclass
from enum import Enum
from warnings import warn

from typed_json_dataclass.utils import to_camel, to_snake, recursive_rename


class MappingMode(Enum):
    """Key-naming convention used when mapping dict/JSON keys to fields."""
    SnakeCase = 1
    CamelCase = 2
    NoMap = 3


class TypedJsonMixin:
    """
    A very small Mixin that we can use in conjunction with Python 3.7
    @dataclass in order to get typed DTO validation.
    """

    def __post_init__(self):
        """Validation logic that runs after an object has been instantiated.

        Based heavily on:
        https://stackoverflow.com/questions/50563546/validating-detailed-types-in-python-dataclasses

        :raises TypeError: if a field value does not match its annotation,
            if a bare or native collection type is used where a parameterized
            ``typing.List`` is required, or if a nested dict cannot be
            instantiated into the expected dataclass.
        """
        for field_def in fields(self):
            field_name = field_def.name
            field_value = getattr(self, field_name)
            actual_type = type(field_value)

            if hasattr(field_def.type, '__origin__'):
                # If a type hint uses typing.List, we need to check the origin
                # in order to see that it's a list
                expected_type = field_def.type.__origin__
            else:
                expected_type = field_def.type

            # Lists are a special case, because we have to get the list element
            # type in a different way
            if field_value is not None:
                class_name = self.__class__.__name__

                # A ForwardRef will appear to just be a str
                # Check that the expected type is a str instead of an actual
                # type definition, and check that the name of the current class
                # matches the string in the ForwardRef.
                if (class_name == expected_type and
                        isinstance(expected_type, str)):
                    # Double check that the type itself and the current class
                    # are the same
                    if actual_type != self.__class__:
                        # NOTE(review): this literal was mangled by markup
                        # stripping in the dump (mismatched quotes around a
                        # lost "<class ...>" fragment); reconstructed from the
                        # message asserted by the recursive-definition test.
                        raise TypeError((f'{class_name}.{field_name} was '
                                         f'defined as a {self.__class__}, '
                                         f'but we found a {actual_type} '
                                         'instead'))
                else:
                    # Optionals are technically just Union[T, None]
                    if expected_type == typing.Union:
                        possible_types = field_def.type.__args__
                        matches = (isinstance(field_value, possible_type) for
                                   possible_type in possible_types)
                        if not any(matches):
                            raise TypeError((f'{class_name}.{field_name} was '
                                             'defined to be any of: '
                                             f'{possible_types} but was found '
                                             f'to be {actual_type} instead'))

                    elif (isinstance(field_value, expected_type) and
                          isinstance(field_value, list)):
                        # A bare `List` (no __args__) tells us nothing about
                        # the element type, so we cannot validate it.
                        if not hasattr(field_def.type, '__args__'):
                            raise TypeError((f'{class_name}.{field_name} was '
                                             f'defined as a {actual_type}, '
                                             'but you must use '
                                             'typing.List[type] '
                                             'instead'))

                        expected_element_type = field_def.type.__args__[0]
                        if isinstance(expected_element_type, typing.TypeVar):
                            raise TypeError((f'{class_name}.{field_name} was '
                                             f'defined as a {actual_type}, '
                                             'but is missing information '
                                             'about the'
                                             ' type of the elements inside '
                                             'it'))

                        if not self._ensure_no_native_collections(
                            expected_element_type
                        ):
                            raise TypeError(((f'{class_name}.{field_name} was '
                                              'detected to use a native '
                                              'Python '
                                              'collection in its type '
                                              'definition. '
                                              'We should only use '
                                              'typing.List[] '
                                              'for these')))

                        for i, element in enumerate(field_value):
                            if isinstance(element, dict):
                                if not element:
                                    # int(**{}) evaluates to 0 and similar for
                                    # other natives, so an empty dict could
                                    # silently "instantiate" — forbid it.
                                    raise TypeError(((f'{class_name}.'
                                                      f'{field_name} '
                                                      'was found to have an '
                                                      'empty dictionary. An '
                                                      'empty '
                                                      'dictionary will not '
                                                      'properly instantiate a '
                                                      'nested object')))

                                # Set reference of the specific list index.
                                # Kind of a hack, to get around the fact that
                                # __setattr__ can only seem to take field
                                # names, but not indices
                                getattr(
                                    self, field_name
                                )[i] = expected_element_type(**element)

                        if not self._validate_list_types(
                            field_value, field_def.type
                        ):
                            raise TypeError((f'{class_name}.{field_name} is '
                                             f'{field_value} which does not '
                                             'match '
                                             f'{field_def.type}. '
                                             'Unfortunately, '
                                             'we are unable to infer the '
                                             'explicit '
                                             f'type of {class_name}.'
                                             f'{field_name}'))

                    elif not isinstance(field_value, expected_type):
                        if isinstance(field_value, dict):
                            if not self._ensure_no_native_collections(
                                expected_type
                            ):
                                raise TypeError((f'{class_name}.{field_name} '
                                                 'was '
                                                 'detected to use a native '
                                                 'Python '
                                                 'dict in its type '
                                                 'definition. '
                                                 'We should only use custom '
                                                 'objects for these'))
                            try:
                                # A dict in place of a dataclass field is
                                # treated as kwargs for the expected type.
                                setattr(
                                    self,
                                    field_name,
                                    expected_type(**field_value)
                                )
                            except TypeError:
                                raise TypeError(f'{class_name}.{field_name} '
                                                'is '
                                                'expected to be '
                                                f'{expected_type}, but value '
                                                f'{field_value} is a dict '
                                                'with unexpected keys')
                        else:
                            raise TypeError(f'{class_name}.{field_name} is '
                                            'expected to be '
                                            f'{expected_type}, but value '
                                            f'{field_value} with '
                                            f'type {actual_type} was found '
                                            'instead')

    def _ensure_no_native_collections(self, expected_type):
        """
        Recursively drills down a type hint like List[List[list]] to make
        sure we never use a native collections.
        """
        if hasattr(expected_type, '__origin__'):
            return self._ensure_no_native_collections(
                expected_type.__args__[0]
            )
        else:
            return expected_type not in {dict, list, set, tuple}

    def _validate_list_types(self, actual_value, expected_type):
        """
        Recursively checks nested lists like List[List[str]] and checks that
        all elements in the list are uniform
        """
        # typing.List[type] will have __args__
        if isinstance(actual_value, list) and \
                hasattr(expected_type, '__args__'):
            nested_type = expected_type.__args__[0]
            if isinstance(nested_type, typing.ForwardRef):
                # Strip out ForwardRef(' and ') as a hack for getting the
                # expected class
                type_for_forward_ref = str(nested_type)[12:-2]
                return all(
                    type_for_forward_ref == v.__class__.__name__
                    for v in actual_value
                )

            return all(
                self._validate_list_types(v, nested_type) for v in actual_value
            )
        else:
            return isinstance(actual_value, expected_type)

    @classmethod
    def _contains_non_default_init_vars(cls, previous_classes=None):
        """Check whether this dataclass contains non-default init-only vars.

        Performs a recursive check through all fields that are declared as
        dataclasses to ensure that no nested dataclasses contain init-only
        variables. The ``previous_classes`` argument is a set of previously
        checked classes to prevent infinite recursion on recursive structures.

        :param previous_classes: The set of previously checked classes.
        """
        try:
            previous_classes.add(cls)
        except AttributeError:  # NoneType
            previous_classes = {cls}

        # The identify check (.. is MISSING) is fine, MISSING is a singleton
        has_init_vars = any(field.type == InitVar and field.default is MISSING
                            for field in cls.__dataclass_fields__.values())
        children_have_init_vars = any(
            child.type._contains_non_default_init_vars(previous_classes)
            for child in fields(cls)
            if (is_dataclass(child.type)
                and child.type not in previous_classes))
        return has_init_vars or children_have_init_vars

    @classmethod
    def from_dict(cls, raw_dict, *, mapping_mode=MappingMode.NoMap):
        """Given a python dict, create an instance of the implementing class.

        :raw_dict: A dictionary that represents the DTO to create
        :mapping_mode: Format for properties
        :returns: Returns an instance of the DTO, instantiated via the dict
        :raises ValueError: if ``mapping_mode`` is not a ``MappingMode``
        :raises TypeError: if the class has non-default init-only variables,
            or if validation of the new instance fails
        """

        if not isinstance(mapping_mode, MappingMode):
            raise ValueError('Invalid mapping mode')

        if cls._contains_non_default_init_vars():
            raise TypeError('Cannot instantiate a dataclass with non-default '
                            'init-only variables')

        if mapping_mode == MappingMode.NoMap:
            return cls(**raw_dict)

        format_method = to_snake if mapping_mode == MappingMode.SnakeCase \
            else to_camel
        mapped_dict = recursive_rename(raw_dict, format_method)
        return cls(**mapped_dict)

    @classmethod
    def from_json(cls, raw_json, *, mapping_mode=MappingMode.NoMap):
        """Given a raw json string, create an instance of the implementing class.

        :raw_json: A json string that represents the DTO to create
        :mapping_mode: Format for properties
        :returns: Returns an instance of the DTO, instantiated via the json
        """

        return cls.from_dict(json.loads(raw_json), mapping_mode=mapping_mode)

    def to_dict(self, *, keep_none=False, mapping_mode=MappingMode.NoMap,
                warn_on_initvar=True):
        """Express the DTO as a dictionary.

        :keep_none: If True, keep keys whose value is None; otherwise they
            are filtered out
        :mapping_mode: Format for properties
        :warn_on_initvar: Emit a warning if the instance contains non-default
            init-only variables.
        :returns: Returns the instantiated DTO as a dictionary
        :raises ValueError: if ``mapping_mode`` is not a ``MappingMode``
        """
        if not isinstance(mapping_mode, MappingMode):
            raise ValueError('Invalid mapping mode')

        if self._contains_non_default_init_vars() and warn_on_initvar:
            warn('Dataclasses with init-only variables cannot be '
                 're-instantiated from a dict or JSON string')

        self_dict = None
        mapped_dict = {}

        if keep_none:
            self_dict = asdict(self)
        else:
            self_dict = {k: v for k, v in asdict(self).items()
                         if v is not None}

        if mapping_mode == MappingMode.NoMap:
            return self_dict

        format_method = to_snake if mapping_mode == MappingMode.SnakeCase \
            else to_camel
        mapped_dict = recursive_rename(self_dict, format_method)
        return mapped_dict

    def to_json(self, *, keep_none=False, mapping_mode=MappingMode.NoMap,
                warn_on_initvar=True):
        """Express the DTO as a json string.

        :keep_none: If True, keep keys whose value is None; otherwise they
            are filtered out
        :mapping_mode: Format for properties
        :warn_on_initvar: Emit a warning if the instance contains non-default
            init-only variables.
        :returns: Returns the instantiated DTO as a json string
        """
        return json.dumps(self.to_dict(
            keep_none=keep_none,
            mapping_mode=mapping_mode,
            warn_on_initvar=warn_on_initvar))
def to_snake(string_to_convert: str) -> str:
    """Convert a camelCase or TitleCase string to snake_case.

    Lowercase characters (including underscores) pass through unchanged, so
    leading underscores are preserved; an underscore is inserted before any
    uppercase letter that is not at position 0.
    """
    converted = ''
    for i, c in enumerate(string_to_convert):
        if c == c.lower():
            converted += c
        else:
            if 0 < i:
                converted += '_'
            converted += c.lower()
    return converted


def to_camel(string_to_convert: str) -> str:
    """Convert a snake_case (or TitleCase) string to camelCase.

    Runs of underscores collapse to a single word boundary, and leading
    underscores are dropped entirely.
    """
    converted = ''
    next_is_capital = False
    for i, c in enumerate(string_to_convert):
        if c == '_':
            # Only treat '_' as a word boundary once output has started, so
            # leading underscores don't capitalize the first real letter.
            next_is_capital = i != 0 and converted != ''
            continue

        if c == c.upper():
            next_is_capital = True

        if i == 0:
            converted += c.lower()
            next_is_capital = False
            continue

        if next_is_capital:
            converted += c.upper()
            next_is_capital = False
        else:
            converted += c.lower()

    return converted


def recursive_rename(raw_dict, format_method):
    """Return a copy of ``raw_dict`` with every key renamed by ``format_method``.

    Recurses into nested dicts AND into dicts found inside lists. The
    previous implementation skipped list values entirely, so dataclass
    fields like ``List[Paragraph]`` kept their original key casing after
    ``asdict`` — and ``from_dict`` with a mapping mode then failed to
    instantiate nested list elements with the unmapped keys.
    """
    renamed_dict = {}
    for k, v in raw_dict.items():
        renamed_dict[format_method(k)] = _rename_value(v, format_method)
    return renamed_dict


def _rename_value(value, format_method):
    # Rename keys recursively inside containers; scalars pass through as-is.
    if isinstance(value, dict):
        return recursive_rename(value, format_method)
    if isinstance(value, list):
        return [_rename_value(v, format_method) for v in value]
    return value