├── tests
├── __init__.py
├── testdata
│ ├── __init__.py
│ ├── test1.json
│ ├── test3.json
│ ├── test6.json
│ ├── test2.json
│ ├── test5.json
│ ├── star_wars.json
│ ├── test8.json
│ ├── test7.json
│ └── test4.json
├── unit
│ ├── utils
│ │ ├── __init__.py
│ │ ├── test_lazy_loader.py
│ │ ├── test_typing_compat.py
│ │ └── test_string_conv.py
│ ├── v1
│ │ ├── __init__.py
│ │ ├── test_union_as_type_alias_recursive.py
│ │ └── test_hooks.py
│ ├── environ
│ │ ├── __init__.py
│ │ ├── .env.test
│ │ ├── .env.prefix
│ │ ├── .env.prod
│ │ ├── test_dumpers.py
│ │ ├── test_lookups.py
│ │ └── test_loaders.py
│ ├── __init__.py
│ ├── test_parsers.py
│ ├── test_frozen_inheritance.py
│ ├── conftest.py
│ ├── test_hooks.py
│ ├── test_models.py
│ └── test_property_wizard_with_future_import.py
└── conftest.py
├── benchmarks
├── __init__.py
├── catch_all.png
├── conftest.py
└── catch_all.py
├── dataclass_wizard
├── environ
│ ├── __init__.py
│ ├── lookups.pyi
│ ├── wizard.pyi
│ └── loaders.py
├── utils
│ ├── __init__.py
│ ├── wrappers.py
│ ├── json_util.py
│ ├── dataclass_compat.py
│ ├── lazy_loader.py
│ ├── object_path.pyi
│ ├── dict_helper.py
│ └── object_path.py
├── py.typed
├── wizard_cli
│ └── __init__.py
├── log.py
├── __version__.py
├── v1
│ ├── __init__.py
│ └── enums.py
├── lazy_imports.py
├── enums.py
├── constants.py
├── wizard_mixins.pyi
├── bases_meta.pyi
├── __init__.py
├── type_def.py
└── serial_json.py
├── docs
├── history.rst
├── readme.rst
├── contributing.rst
├── _static
│ ├── custom.css
│ ├── dark_mode_toggle.js
│ └── dark_mode.css
├── modules.rst
├── advanced_usage
│ ├── index.rst
│ └── serializer_hooks.rst
├── common_use_cases
│ ├── index.rst
│ ├── custom_types.rst
│ ├── skip_the_str.rst
│ ├── easier_debug_mode.rst
│ ├── skip_inheritance.rst
│ ├── cyclic_or_recursive_dataclasses.rst
│ ├── handling_unknown_json_keys.rst
│ └── nested_key_paths.rst
├── requirements.txt
├── _templates
│ ├── sidebar_modindex.html
│ ├── hacks.html
│ └── sidebarintro.html
├── index.rst
├── Makefile
├── dataclass_wizard.wizard_cli.rst
├── make.bat
├── dataclass_wizard.v1.rst
├── dataclass_wizard.environ.rst
├── installation.rst
├── dataclass_wizard.utils.rst
├── quickstart.rst
├── dataclass_wizard.rst
├── python_compatibility.rst
├── wiz_cli.rst
└── examples.rst
├── images
└── logo.png
├── .env
├── .pre-commit-config.yaml
├── .github
├── ISSUE_TEMPLATE.md
├── FUNDING.yml
└── workflows
│ ├── dev.yml
│ └── release.yml
├── MANIFEST.in
├── .bumpversion.toml
├── LICENSE
├── .editorconfig
├── .pyup.yml
├── .readthedocs.yaml
├── .gitignore
├── run_bench.py
├── recipe
└── meta.yaml
├── Makefile
└── CONTRIBUTING.rst
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/benchmarks/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/testdata/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/unit/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/unit/v1/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/unit/environ/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataclass_wizard/environ/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataclass_wizard/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/history.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../HISTORY.rst
2 |
--------------------------------------------------------------------------------
/docs/readme.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.rst
2 |
--------------------------------------------------------------------------------
/docs/contributing.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../CONTRIBUTING.rst
2 |
--------------------------------------------------------------------------------
/tests/unit/__init__.py:
--------------------------------------------------------------------------------
1 | """Unit test package for dataclass_wizard."""
2 |
--------------------------------------------------------------------------------
/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rnag/dataclass-wizard/HEAD/images/logo.png
--------------------------------------------------------------------------------
/tests/unit/environ/.env.test:
--------------------------------------------------------------------------------
1 | myValue=1.23
2 | Another_Date=1639763585
3 | my_dt=1651077045
4 |
--------------------------------------------------------------------------------
/benchmarks/catch_all.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rnag/dataclass-wizard/HEAD/benchmarks/catch_all.png
--------------------------------------------------------------------------------
/dataclass_wizard/py.typed:
--------------------------------------------------------------------------------
1 | # PEP-561 marker https://mypy.readthedocs.io/en/latest/installed_packages.html
2 |
--------------------------------------------------------------------------------
/dataclass_wizard/wizard_cli/__init__.py:
--------------------------------------------------------------------------------
1 | from .cli import main
2 | from .schema import PyCodeGenerator
3 |
--------------------------------------------------------------------------------
/docs/_static/custom.css:
--------------------------------------------------------------------------------
1 | .bold-code {
2 | font-family: monospace;
3 | font-weight: bold;
4 | }
5 |
--------------------------------------------------------------------------------
/tests/unit/environ/.env.prefix:
--------------------------------------------------------------------------------
1 | MY_PREFIX_STR='my prefix value'
2 | MY_PREFIX_BOOL=t
3 | MY_PREFIX_INT='123.0'
4 |
5 |
--------------------------------------------------------------------------------
/.env:
--------------------------------------------------------------------------------
1 | # These values are used in unit tests (tests/unit/test_env_wizard.py)
2 | MY_STR=42
3 | my_time=15:20
4 | MyDate=2022-01-21
5 |
--------------------------------------------------------------------------------
/docs/modules.rst:
--------------------------------------------------------------------------------
1 | dataclass_wizard
2 | ================
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 |
7 | dataclass_wizard
8 |
--------------------------------------------------------------------------------
/docs/advanced_usage/index.rst:
--------------------------------------------------------------------------------
1 | Advanced Usage
2 | ==============
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 | :glob:
7 |
8 | *
9 |
--------------------------------------------------------------------------------
/docs/common_use_cases/index.rst:
--------------------------------------------------------------------------------
1 | Common Use Cases
2 | ================
3 |
4 | .. toctree::
5 | :maxdepth: 2
6 | :glob:
7 |
8 | *
9 |
--------------------------------------------------------------------------------
/tests/unit/environ/.env.prod:
--------------------------------------------------------------------------------
1 | My_Value=3.21
2 | # This value overrides the one in another dotenv file (../../.env)
3 | MY_STR='hello world!'
4 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx-issues==5.0.0
2 | sphinx-autodoc-typehints==2.5.0; python_version>="3.10"
3 | sphinx-copybutton==0.5.2
4 | typing-extensions>=4.9.0
5 |
--------------------------------------------------------------------------------
/tests/testdata/test1.json:
--------------------------------------------------------------------------------
1 | {
2 | "key": "value",
3 | "intKey": 20,
4 | "floatKey": 1.23,
5 | "my_dict": {
6 | "key2": "value!"
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/dataclass_wizard/log.py:
--------------------------------------------------------------------------------
1 | from logging import getLogger
2 |
3 | from .constants import LOG_LEVEL, PACKAGE_NAME
4 |
5 |
6 | LOG = getLogger(PACKAGE_NAME)
7 | LOG.setLevel(LOG_LEVEL)
8 |
--------------------------------------------------------------------------------
/tests/testdata/test3.json:
--------------------------------------------------------------------------------
1 | [
2 | 1,
3 | 2,
4 | "str",
5 | true,
6 | {"trueStory": "once upon a time..."},
7 | {"trueBool": true, "true_story": 2, "my_list": [1, {"hey": "world"}]}
8 | ]
9 |
--------------------------------------------------------------------------------
/tests/testdata/test6.json:
--------------------------------------------------------------------------------
1 | {
2 | "my_field": "testing",
3 | "anotherField": "2021-01-12",
4 | "MyList": [
5 | 1, 2, 3, {"another_Key": "value"}, [{"key": "value", "myTime": "03:20"}]
6 | ]
7 | }
8 |
--------------------------------------------------------------------------------
/docs/_templates/sidebar_modindex.html:
--------------------------------------------------------------------------------
1 |
2 |
6 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/rstcheck/rstcheck
3 | rev: v6.2.4
4 | hooks:
5 | - id: rstcheck
6 | name: rstcheck (README.rst)
7 | args: ["--report-level=warning"]
8 | files: ^README\.rst$
9 |
--------------------------------------------------------------------------------
/tests/testdata/test2.json:
--------------------------------------------------------------------------------
1 | [
2 | {"key": "value"},
3 | {"key": null, "anotherKey": "something else", "truth": 4},
4 | {"my_list": {}},
5 | {"my_date": "2021-12-31T04:32:34", "another-key": null},
6 | {"another_Key": 32, "my-id": "testing"},
7 | 3,
8 | "hello!"
9 | ]
10 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | * Dataclass Wizard version:
2 | * Python version:
3 | * Operating System:
4 |
5 | ### Description
6 |
7 | Describe what you were trying to get done.
8 | Tell us what happened, what went wrong, and what you expected to happen.
9 |
10 | ### What I Did
11 |
12 | ```
13 | Paste the command(s) you ran and the output.
14 | If there was a crash, please include the traceback here.
15 | ```
16 |
--------------------------------------------------------------------------------
/benchmarks/conftest.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 |
4 | @pytest.fixture(scope='session')
5 | def n():
6 | return 100_000
7 |
8 |
9 | def pytest_addoption(parser):
10 | parser.addoption(
11 | "--all", # long option
12 | "-A",
13 | action="store_true",
14 | default=False,
15 | help="Run benchmarks for *all* libraries, including *slower* ones like `jsons`",
16 | )
17 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include CONTRIBUTING.rst
2 | include HISTORY.rst
3 | include LICENSE
4 | include README.rst
5 | include dataclass_wizard/py.typed
6 |
7 | recursive-include tests *.py
8 | recursive-exclude tests/integration *
9 | recursive-exclude * __pycache__
10 | recursive-exclude * *.py[co]
11 |
12 | recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif
13 | recursive-include dataclass_wizard *.pyi
14 |
--------------------------------------------------------------------------------
/tests/testdata/test5.json:
--------------------------------------------------------------------------------
1 | [
2 | [
3 | [
4 | "hello",
5 | "world",
6 | {"key": 123,
7 | "nested_classes": {
8 | "blah": "test",
9 | "another-one": [{"testing": "world"}]
10 | }
11 | },
12 | {"key": 123,
13 | "nested_classes": {"Just something with a space": 0}
14 | }
15 | ],
16 | 123,
17 | "testing"
18 | ],
19 | {"key": "value"}
20 | ]
21 |
--------------------------------------------------------------------------------
/dataclass_wizard/__version__.py:
--------------------------------------------------------------------------------
1 | """
2 | Dataclass Wizard - a set of wizarding tools for interacting with `dataclasses`
3 | """
4 |
5 | __title__ = 'dataclass-wizard'
6 |
7 | __description__ = ('Lightning-fast JSON wizardry for Python dataclasses — '
8 | 'effortless serialization right out of the box!')
9 | __url__ = 'https://github.com/rnag/dataclass-wizard'
10 | __version__ = '0.37.0'
11 | __author__ = 'Ritvik Nag'
12 | __author_email__ = 'me@ritviknag.com'
13 | __license__ = 'Apache 2.0'
14 | __copyright__ = 'Copyright 2021-2025 Ritvik Nag'
15 |
--------------------------------------------------------------------------------
/dataclass_wizard/utils/wrappers.py:
--------------------------------------------------------------------------------
1 | """
2 | Wrapper utilities
3 | """
4 | from typing import Callable
5 |
6 |
7 | class FuncWrapper:
8 | """
9 | Wraps a callable `f` - which is occasionally useful, for example when
10 | defining functions as :class:`Enum` values. See below answer for more
11 | details.
12 |
13 | https://stackoverflow.com/a/40339397/10237506
14 | """
15 | __slots__ = ('f', )
16 |
17 | def __init__(self, f: Callable):
18 | self.f = f
19 |
20 | def __call__(self, *args, **kwargs):
21 | return self.f(*args, **kwargs)
22 |
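23 | # Illustrative sketch (added for documentation purposes; not part of this
24 | # module): wrapping a plain function lets it be stored as an `Enum` member
25 | # value, whereas a bare function or descriptor would be treated as a method
26 | # rather than a member. See `LetterCase` in `dataclass_wizard/enums.py` for
27 | # actual usage of this pattern.
28 | #
29 | #     from enum import Enum
30 | #
31 | #     class Transform(Enum):
32 | #         UPPER = FuncWrapper(str.upper)
33 | #
34 | #     # `Transform.UPPER.value` is the `FuncWrapper`, which is callable
35 | #     assert Transform.UPPER.value('hello') == 'HELLO'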
--------------------------------------------------------------------------------
/tests/testdata/star_wars.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Yavin IV",
3 | "rotation_period": "24",
4 | "orbital_period": "4818",
5 | "diameter": "10200",
6 | "climate": "temperate, tropical",
7 | "gravity": "1 standard",
8 | "terrain": "jungle, rainforests",
9 | "surface_water": "8",
10 | "population": "1000",
11 | "residents": [],
12 | "films": [
13 | "https://swapi.co/api/films/1/"
14 | ],
15 | "created": "2014-12-10T11:37:19.144000Z",
16 | "edited": "2014-12-20T20:58:18.421000Z",
17 | "url": "https://swapi.co/api/planets/3/"
18 | }
19 |
--------------------------------------------------------------------------------
/tests/unit/environ/test_dumpers.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from dataclass_wizard import EnvWizard, json_field
4 |
5 |
6 | def test_dump_with_excluded_fields_and_skip_defaults():
7 |
8 | os.environ['MY_FIRST_STR'] = 'hello'
9 | os.environ['my-second-str'] = 'world'
10 |
11 | class TestClass(EnvWizard, reload_env=True):
12 | my_first_str: str
13 | my_second_str: str = json_field(..., dump=False)
14 | my_int: int = 123
15 |
16 | assert TestClass(_reload=True).to_dict(
17 | exclude=['my_first_str'],
18 | skip_defaults=True,
19 | ) == {}
20 |
--------------------------------------------------------------------------------
/.bumpversion.toml:
--------------------------------------------------------------------------------
1 | [tool.bumpversion]
2 | current_version = "0.37.0"
3 | parse = "(?P\\d+)\\.(?P\\d+)\\.(?P\\d+)"
4 | serialize = ["{major}.{minor}.{patch}"]
5 |
6 | commit = true
7 | tag = true
8 | sign_tags = false
9 | tag_name = "v{new_version}"
10 | tag_message = "Bump version: {current_version} → {new_version}"
11 | allow_dirty = false
12 | message = "Bump version: {current_version} → {new_version}"
13 |
14 | [[tool.bumpversion.files]]
15 | filename = "dataclass_wizard/__version__.py"
16 | search = "__version__ = '{current_version}'"
17 | replace = "__version__ = '{new_version}'"
18 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache Software License 2.0
2 |
3 | Copyright (c) 2021, Ritvik Nag
4 |
5 | Licensed under the Apache License, Version 2.0 (the "License");
6 | you may not use this file except in compliance with the License.
7 | You may obtain a copy of the License at
8 |
9 | http://www.apache.org/licenses/LICENSE-2.0
10 |
11 | Unless required by applicable law or agreed to in writing, software
12 | distributed under the License is distributed on an "AS IS" BASIS,
13 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | See the License for the specific language governing permissions and
15 | limitations under the License.
16 |
17 |
--------------------------------------------------------------------------------
/tests/unit/test_parsers.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from typing import Literal
4 |
5 | from dataclass_wizard.parsers import LiteralParser
6 |
7 |
8 | class TestLiteralParser:
9 | @pytest.fixture
10 | def literal_parser(self) -> LiteralParser:
11 | return LiteralParser(cls=object, base_type=Literal["foo"], extras={})
12 |
13 | def test_literal_parser_dunder_contains_succeeds_if_item_in_keys_of_base_type(self, literal_parser):
14 | assert "foo" in literal_parser
15 |
16 | def test_literal_parser_dunder_contains_fails_if_item_not_in_keys_of_base_type(self, literal_parser):
17 | assert "bar" not in literal_parser
18 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | # EditorConfig helps developers define and maintain consistent
2 | # coding styles between different editors and IDEs
3 | # http://editorconfig.org
4 |
5 | # top-most EditorConfig file
6 | root = true
7 |
8 | [*]
9 |
10 | indent_style = space
11 | indent_size = 4
12 |
13 | # Unix-style newlines with a newline ending every file
14 | charset = utf-8
15 | end_of_line = lf
16 | insert_final_newline = true
17 | trim_trailing_whitespace = true
18 |
19 | [*.bat]
20 | indent_style = tab
21 | end_of_line = crlf
22 |
23 | [{*.yml,*.yaml}]
24 | indent_size = 2
25 |
26 | [LICENSE]
27 | insert_final_newline = false
28 |
29 | [Makefile]
30 | indent_style = tab
31 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. include:: readme.rst
2 |
3 | ..
4 | Create a "hidden" table of contents, so that Sphinx doesn't complain about
5 | documents not being included in any toctree; note that we actually have
6 | links in the sidebar, however Sphinx doesn't know about this.
7 |
8 | See also: https://stackoverflow.com/a/60491434/10237506
9 |
10 | .. toctree::
11 | :hidden:
12 |
13 | readme
14 | overview
15 | installation
16 | quickstart
17 | examples
18 | wiz_cli
19 | using_field_properties
20 | python_compatibility
21 | common_use_cases/index
22 | advanced_usage/index
23 | modules
24 | contributing
25 | history
26 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = python -msphinx
7 | SPHINXPROJ = dataclass_wizard
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/common_use_cases/custom_types.rst:
--------------------------------------------------------------------------------
1 | Custom or Unsupported Types
2 | ===========================
3 |
4 | If you need to serialize or deserialize a type that is not supported out of the
5 | box (for example, `ipaddress.IPv4Address`_ or a domain-specific class),
6 | Dataclass Wizard provides **type hooks** to define custom load and dump behavior.
7 |
8 | Type hooks allow you to extend (de)serialization without modifying the type
9 | itself, and work with or without inheritance.
10 |
11 | See `Type Hooks`_ for details and examples.
12 |
13 | .. _Type Hooks: https://dcw.ritviknag.com/en/latest/advanced_usage/type_hooks.html
14 | .. _`ipaddress.IPv4Address`: https://docs.python.org/3/library/ipaddress.html#ipaddress.IPv4Address
15 |
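16 | As a quick illustration, below is a minimal sketch that mirrors the pattern
17 | used in ``tests/unit/test_hooks.py`` (the ``Host`` class itself is purely
18 | illustrative):
19 |
20 | .. code:: python3
21 |
22 |     from dataclasses import dataclass
23 |     from ipaddress import IPv4Address
24 |
25 |     from dataclass_wizard import JSONWizard
26 |
27 |
28 |     @dataclass
29 |     class Host(JSONWizard):
30 |         addr: IPv4Address
31 |
32 |
33 |     # register built-in load/dump support for `IPv4Address`
34 |     Host.register_type(IPv4Address)
35 |
36 |     host = Host.from_dict({'addr': '127.0.0.1'})
37 |     assert host.addr == IPv4Address('127.0.0.1')
38 |     assert host.to_dict() == {'addr': '127.0.0.1'}
39 |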
--------------------------------------------------------------------------------
/docs/dataclass_wizard.wizard_cli.rst:
--------------------------------------------------------------------------------
1 | dataclass\_wizard.wizard\_cli package
2 | =====================================
3 |
4 | Submodules
5 | ----------
6 |
7 | dataclass\_wizard.wizard\_cli.cli module
8 | ----------------------------------------
9 |
10 | .. automodule:: dataclass_wizard.wizard_cli.cli
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | dataclass\_wizard.wizard\_cli.schema module
16 | -------------------------------------------
17 |
18 | .. automodule:: dataclass_wizard.wizard_cli.schema
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | Module contents
24 | ---------------
25 |
26 | .. automodule:: dataclass_wizard.wizard_cli
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
--------------------------------------------------------------------------------
/tests/testdata/test8.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "list_of_dictionaries": [
4 | {"my-energies": []},
5 | {"Key": "value", "myEnergies": [{"myTestVal": true}]},
6 | {"key": null, "myEnergies": []},
7 | {"myEnergies": [1, {"anotherVal": "testing", "my_test_val": 123}]},
8 | {"MyEnergies": [
9 | {"string_val": "hello world!"},
10 | "testing",
11 | {"mergedFloat": 1.23},
12 | 123
13 | ]}
14 | ]
15 | },
16 |
17 | [{"key": "value"}, {"anotherKey": "val"}],
18 | [{"question": "how should list of lists be merged (for example in this case with the above)?"}],
19 | [{"explanation": "Because it's not *clear* how the merge should happen in this situation."}]
20 | ]
21 |
--------------------------------------------------------------------------------
/tests/unit/test_frozen_inheritance.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass, is_dataclass
2 | from dataclass_wizard import JSONWizard
3 |
4 |
5 | def test_jsonwizard_is_not_a_dataclass_mixin():
6 | # If JSONWizard becomes a dataclass again, frozen subclasses can break.
7 | assert not is_dataclass(JSONWizard)
8 |
9 |
10 | def test_v1_frozen_dataclass_can_inherit_from_jsonwizard():
11 | @dataclass(eq=False, frozen=True)
12 | class BaseClass(JSONWizard):
13 | class _(JSONWizard.Meta):
14 | v1 = True
15 |
16 | x: int = 1
17 |
18 | obj = BaseClass()
19 | assert obj.x == 1
20 |
21 |
22 | def test_frozen_dataclass_can_inherit_from_jsonwizard():
23 | @dataclass(eq=False, frozen=True)
24 | class BaseClass(JSONWizard):
25 | x: int = 1
26 |
27 | obj = BaseClass()
28 | assert obj.x == 1
29 |
--------------------------------------------------------------------------------
/tests/testdata/test7.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "MyTestApis": [{"firstApi": "testing."}],
4 | "People": [
5 | {"name": "Ricardo", "Age": "21"},
6 | {"name": "Stephan", "age": 23}
7 | ],
8 | "children": [
9 | {"name": "Alice", "age": 8},
10 | {"name": "Jonas", "age": 12.4}
11 | ],
12 | "Activities": [{"name": "fishing"}],
13 | "Equipment": [{"count": 12}],
14 | "key": 123,
15 | "nested_classes": {
16 | "blah": "test",
17 | "another-one": [
18 | {
19 | "testing": "world"
20 | }
21 | ]
22 | }
23 | },
24 | {
25 | "something_else": "test",
26 | "nested_classes": {
27 | "Just something": 0
28 | }
29 | }
30 | ]
31 |
--------------------------------------------------------------------------------
/.pyup.yml:
--------------------------------------------------------------------------------
1 | # see https://pyup.io/docs/configuration/ for all available options
2 | update: all
3 | # configure dependency pinning globally
4 | pin: True
5 | # set the default branch
6 | branch: main
7 | # update schedule
8 | # allowed: "every day", "every week", ..
9 | schedule: 'every month'
10 | # search for requirement files
11 | search: False
12 | # specify requirement files by hand
13 | requirements:
14 | - requirements.txt:
15 | pin: False
16 | - requirements-dev.txt
17 | - requirements-test.txt
18 | - docs/requirements.txt
19 | # add a label to pull requests, default is not set
20 | label_prs: update
21 | # assign users to pull requests, default is not set
22 | assignees:
23 | - rnag
24 | # configure the branch prefix the bot is using
25 | # default: pyup-
26 | branch_prefix: pyup/
27 | # set a global prefix for PRs, default is not set
28 | # pr_prefix: "Bug #12345"
29 | # allow to close stale PRs
30 | close_prs: True
31 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=python -msphinx
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 | set SPHINXPROJ=dataclass_wizard
13 |
14 | if "%1" == "" goto help
15 |
16 | %SPHINXBUILD% >NUL 2>NUL
17 | if errorlevel 9009 (
18 | echo.
19 | echo.The Sphinx module was not found. Make sure you have Sphinx installed,
20 | echo.then set the SPHINXBUILD environment variable to point to the full
21 | echo.path of the 'sphinx-build' executable. Alternatively you may add the
22 | echo.Sphinx directory to PATH.
23 | echo.
24 | echo.If you don't have Sphinx installed, grab it from
25 | echo.http://sphinx-doc.org/
26 | exit /b 1
27 | )
28 |
29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
30 | goto end
31 |
32 | :help
33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
34 |
35 | :end
36 | popd
37 |
--------------------------------------------------------------------------------
/docs/_templates/hacks.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
27 |
28 |
--------------------------------------------------------------------------------
/tests/unit/conftest.py:
--------------------------------------------------------------------------------
1 | """
2 | Common test fixtures and utilities.
3 | """
4 | from dataclasses import dataclass
5 | from uuid import UUID
6 |
7 | import pytest
8 |
9 |
10 | # Ref: https://docs.pytest.org/en/6.2.x/example/parametrize.html#parametrizing-conditional-raising
11 | from contextlib import nullcontext as does_not_raise
12 |
13 |
14 | @dataclass
15 | class SampleClass:
16 | """Sample dataclass model for various test scenarios."""
17 | f1: str
18 | f2: int
19 |
20 |
21 | class MyUUIDSubclass(UUID):
22 | """
23 | Simple UUID subclass that calls :meth:`hex` when ``str()`` is invoked.
24 | """
25 |
26 | def __str__(self):
27 | return self.hex
28 |
29 |
30 | @pytest.fixture
31 | def mock_log(caplog):
32 | caplog.set_level('INFO', logger='dataclass_wizard')
33 | return caplog
34 |
35 | @pytest.fixture
36 | def mock_debug_log(caplog):
37 | caplog.set_level('DEBUG', logger='dataclass_wizard')
38 | return caplog
39 |
--------------------------------------------------------------------------------
/docs/_static/dark_mode_toggle.js:
--------------------------------------------------------------------------------
1 | document.addEventListener("DOMContentLoaded", function () {
2 | const toggleButton = document.createElement("button");
3 | toggleButton.innerText = "🌓 Dark Mode";
4 | toggleButton.style.cssText = `
5 | position: fixed;
6 | bottom: 20px;
7 | right: 20px;
8 | padding: 8px 12px;
9 | background-color: #444;
10 | color: white;
11 | border: none;
12 | cursor: pointer;
13 | z-index: 1000;
14 | `;
15 |
16 | document.body.appendChild(toggleButton);
17 |
18 | toggleButton.addEventListener("click", function () {
19 | document.body.classList.toggle("dark-mode");
20 | localStorage.setItem("dark-mode", document.body.classList.contains("dark-mode"));
21 | });
22 |
23 | // Persist dark mode preference across pages
24 | if (localStorage.getItem("dark-mode") === "true") {
25 | document.body.classList.add("dark-mode");
26 | }
27 | });
28 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: [rnag] # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
4 | patreon: # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: # Replace with a single Ko-fi username
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
12 | polar: # Replace with a single Polar username
13 | buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
14 | thanks_dev: # Replace with a single thanks.dev username
15 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
16 |
--------------------------------------------------------------------------------
/tests/unit/utils/test_lazy_loader.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from pytest_mock import MockerFixture
3 |
4 | from dataclass_wizard.utils.lazy_loader import LazyLoader
5 |
6 |
7 | @pytest.fixture
8 | def mock_logging(mocker: MockerFixture):
9 | return mocker.patch('dataclass_wizard.utils.lazy_loader.logging')
10 |
11 |
12 | def test_lazy_loader_when_module_not_found():
13 | extra_name = 'my-extra'
14 |
15 | mod = LazyLoader(globals(), 'my_module', extra_name)
16 |
17 | with pytest.raises(ImportError) as e:
18 | _ = mod.my_var
19 |
20 | assert 'pip install' in e.value.msg
21 | assert extra_name in e.value.msg
22 |
23 |
24 | def test_lazy_loader_with_warning(mock_logging):
25 | warning_msg = 'My test warning'
26 |
27 | mod = LazyLoader(globals(), 'pytimeparse', warning=warning_msg)
28 |
29 | _ = mod.parse
30 |
31 | # Assert a warning is logged
32 | mock_logging.warning.assert_called_once_with(warning_msg)
33 |
34 | # Add for code coverage
35 | _ = dir(mod)
36 |
--------------------------------------------------------------------------------
/tests/unit/v1/test_union_as_type_alias_recursive.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 |
3 | from dataclass_wizard import JSONWizard
4 |
5 |
6 | # noinspection PyCompatibility
7 | def test_union_as_type_alias_recursive():
8 | """
9 | Recursive or self-referential `Union` (defined as `TypeAlias`)
10 | types are supported.
11 | """
12 | type JSON = str | int | float | bool | dict[str, JSON] | list[JSON] | None
13 |
14 | @dataclass
15 | class MyTestClass(JSONWizard):
16 |
17 | class _(JSONWizard.Meta):
18 | v1 = True
19 |
20 | name: str
21 | meta: str
22 | msg: JSON
23 |
24 | x = MyTestClass.from_dict(
25 | {
26 | "name": "name",
27 | "meta": "meta",
28 | "msg": [{"x": {"x": [{"x": ["x", 1, 1.0, True, None]}]}}],
29 | }
30 | )
31 | assert x == MyTestClass(
32 | name="name",
33 | meta="meta",
34 | msg=[{"x": {"x": [{"x": ["x", 1, 1.0, True, None]}]}}],
35 | )
36 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # Read the Docs configuration file for Sphinx projects
2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
3 |
4 | # Required
5 | version: 2
6 |
7 | # Set the OS, Python version and other tools you might need
8 | build:
9 | os: ubuntu-22.04
10 | tools:
11 | python: "3.12"
12 |
13 | # Build documentation in the "docs/" directory with Sphinx
14 | sphinx:
15 | configuration: docs/conf.py
16 | # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
17 | # builder: "dirhtml"
18 | # Fail on all warnings to avoid broken references
19 | # fail_on_warning: true
20 |
21 | # Optionally build your docs in additional formats such as PDF and ePub
22 | # formats:
23 | # - pdf
24 | # - epub
25 |
26 | # Optional but recommended, declare the Python requirements required
27 | # to build your documentation
28 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
29 | python:
30 | install:
31 | - requirements: docs/requirements.txt
32 |
--------------------------------------------------------------------------------
/docs/common_use_cases/skip_the_str.rst:
--------------------------------------------------------------------------------
1 | Skip the :meth:`__str__`
2 | ========================
3 |
4 | .. note::
5 | It is now easier to view ``DEBUG``-level log messages from this library! Check out
6 | the `Easier Debug Mode `__ section.
7 |
8 | The ``JSONSerializable`` class implements a default
9 | ``__str__`` method if a sub-class doesn't already define
10 | this method. This method will format the dataclass
11 | instance as a prettified JSON string, for example whenever ``str(obj)``
12 | or ``print(obj)`` is called.
13 |
14 | If you want to opt out of this default ``__str__`` method,
15 | you can pass ``str=False`` as shown below:
16 |
17 |
18 | .. code:: python3
19 |
20 | from dataclasses import dataclass
21 |
22 | from dataclass_wizard import JSONSerializable
23 |
24 |
25 | @dataclass
26 | class MyClass(JSONSerializable, str=False):
27 | my_str: str = 'hello world'
28 | my_int: int = 2
29 |
30 |
31 | c = MyClass()
32 | print(c)
33 | # prints the same as `repr(c)`:
34 | # MyClass(my_str='hello world', my_int=2)
35 |
--------------------------------------------------------------------------------
/docs/dataclass_wizard.v1.rst:
--------------------------------------------------------------------------------
1 | dataclass\_wizard.v1 package
2 | ============================
3 |
4 | Submodules
5 | ----------
6 |
7 | dataclass\_wizard.v1.decorators module
8 | --------------------------------------
9 |
10 | .. automodule:: dataclass_wizard.v1.decorators
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | dataclass\_wizard.v1.enums module
16 | ---------------------------------
17 |
18 | .. automodule:: dataclass_wizard.v1.enums
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | dataclass\_wizard.v1.loaders module
24 | -----------------------------------
25 |
26 | .. automodule:: dataclass_wizard.v1.loaders
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | dataclass\_wizard.v1.models module
32 | ----------------------------------
33 |
34 | .. automodule:: dataclass_wizard.v1.models
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 | Module contents
40 | ---------------
41 |
42 | .. automodule:: dataclass_wizard.v1
43 | :members:
44 | :undoc-members:
45 | :show-inheritance:
46 |
--------------------------------------------------------------------------------
/tests/unit/utils/test_typing_compat.py:
--------------------------------------------------------------------------------
1 | from typing import ClassVar, Generic, Union, List, Tuple, Dict, Callable, Literal
2 |
3 | import pytest
4 |
5 | from dataclass_wizard.type_def import T
6 | from dataclass_wizard.utils.typing_compat import get_origin, get_args
7 |
8 |
9 | @pytest.mark.parametrize(
10 | 'tp,expected',
11 | [
12 | (Literal[42], Literal),
13 | (int, int),
14 | (ClassVar[int], ClassVar),
15 | (Generic, Generic),
16 | (Generic[T], Generic),
17 | (Union[T, int], Union),
18 | (List[Tuple[T, T]][int], list),
19 | ]
20 | )
21 | def test_get_origin(tp, expected):
22 | actual = get_origin(tp)
23 | assert actual is expected
24 |
25 |
26 | @pytest.mark.parametrize(
27 | 'tp,expected',
28 | [
29 | (Dict[str, int], (str, int)),
30 | (int, ()),
31 | (Callable[[], T][int], ([], int)),
32 | (Union[int, Union[T, int], str][int], (int, str)),
33 | (Union[int, Tuple[T, int]][str], (int, Tuple[str, int])),
34 | ]
35 | )
36 | def test_get_args(tp, expected):
37 | actual = get_args(tp)
38 | assert actual == expected
39 |
--------------------------------------------------------------------------------
/dataclass_wizard/v1/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = [
2 | # Base exports
3 | 'LoadMixin',
4 | 'DumpMixin',
5 | # Models
6 | 'Alias',
7 | 'AliasPath',
8 | # Abstract Pattern
9 | 'Pattern',
10 | 'AwarePattern',
11 | 'UTCPattern',
12 | # "Naive" Date/Time Patterns
13 | 'DatePattern',
14 | 'DateTimePattern',
15 | 'TimePattern',
16 | # Timezone "Aware" Date/Time Patterns
17 | 'AwareDateTimePattern',
18 | 'AwareTimePattern',
19 | # UTC Date/Time Patterns
20 | 'UTCDateTimePattern',
21 | 'UTCTimePattern',
22 | ]
23 |
24 | from .dumpers import DumpMixin, setup_default_dumper
25 | from .loaders import LoadMixin, setup_default_loader
26 |
27 | from .models import (Alias,
28 | AliasPath,
29 | Pattern,
30 | AwarePattern,
31 | UTCPattern,
32 | DatePattern,
33 | DateTimePattern,
34 | TimePattern,
35 | AwareDateTimePattern,
36 | AwareTimePattern,
37 | UTCDateTimePattern,
38 | UTCTimePattern)
39 |
--------------------------------------------------------------------------------
/docs/dataclass_wizard.environ.rst:
--------------------------------------------------------------------------------
1 | dataclass\_wizard.environ package
2 | =================================
3 |
4 | Submodules
5 | ----------
6 |
7 | dataclass\_wizard.environ.dumpers module
8 | ----------------------------------------
9 |
10 | .. automodule:: dataclass_wizard.environ.dumpers
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | dataclass\_wizard.environ.loaders module
16 | ----------------------------------------
17 |
18 | .. automodule:: dataclass_wizard.environ.loaders
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | dataclass\_wizard.environ.lookups module
24 | ----------------------------------------
25 |
26 | .. automodule:: dataclass_wizard.environ.lookups
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | dataclass\_wizard.environ.wizard module
32 | ---------------------------------------
33 |
34 | .. automodule:: dataclass_wizard.environ.wizard
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 | Module contents
40 | ---------------
41 |
42 | .. automodule:: dataclass_wizard.environ
43 | :members:
44 | :undoc-members:
45 | :show-inheritance:
46 |
--------------------------------------------------------------------------------
/dataclass_wizard/lazy_imports.py:
--------------------------------------------------------------------------------
1 | """
2 | Lazy Import definitions. Generally, these imports will be available when any
3 | "bonus features" are installed, i.e. as below:
4 |
5 | $ pip install dataclass-wizard[timedelta]
6 | """
7 |
8 | from .constants import PY311_OR_ABOVE
9 | from .utils.lazy_loader import LazyLoader
10 |
11 |
12 | # python-dotenv: for loading environment values from `.env` files
13 | dotenv = LazyLoader(globals(), 'dotenv', 'dotenv', local_name='python-dotenv')
14 |
15 | # pytimeparse: for parsing JSON string values as a `datetime.timedelta`
16 | pytimeparse = LazyLoader(globals(), 'pytimeparse', 'timedelta')
17 |
18 | # PyYAML: to add support for (de)serializing YAML data to dataclass instances
19 | yaml = LazyLoader(globals(), 'yaml', 'yaml', local_name='PyYAML')
20 |
21 | # Tomli -or- tomllib (PY 3.11+): to add support for (de)serializing TOML
22 | # data to dataclass instances
23 | if PY311_OR_ABOVE:
24 | import tomllib as toml
25 | else:
26 | toml = LazyLoader(globals(), 'tomli', 'toml', local_name='tomli')
27 |
28 | # Tomli-W: to add support for serializing dataclass instances to TOML
29 | toml_w = LazyLoader(globals(), 'tomli_w', 'toml', local_name='tomli-w')
30 |
--------------------------------------------------------------------------------
/dataclass_wizard/utils/json_util.py:
--------------------------------------------------------------------------------
1 | """
2 | JSON Helper Utilities - *only* internally used in ``errors.py``,
3 | i.e. for rendering exceptions.
4 |
5 | .. NOTE::
6 | This module should not be imported anywhere at the *top-level*
7 | of another library module!
8 |
9 | """
10 | __all__ = [
11 | 'safe_dumps',
12 | ]
13 |
14 | from dataclasses import is_dataclass
15 | from datetime import datetime, time, date
16 | from enum import Enum
17 | from json import dumps, JSONEncoder
18 | from typing import Any
19 | from uuid import UUID
20 |
21 | from ..loader_selection import asdict
22 |
23 |
24 | class SafeEncoder(JSONEncoder):
25 | """
26 | A Customized JSON Encoder, which copies core logic in the
27 | `dumpers` module to support serialization of more complex
28 | Python types, such as `datetime` and `Enum`.
29 | """
30 |
31 | def default(self, o: Any) -> Any:
32 | """Default function, copies the core (minimal) logic from `dumpers.py`."""
33 |
34 | if is_dataclass(o):
35 | return asdict(o)
36 |
37 | if isinstance(o, Enum):
38 | return o.value
39 |
40 | if isinstance(o, UUID):
41 | return o.hex
42 |
43 | if isinstance(o, (datetime, time)):
44 | return o.isoformat().replace('+00:00', 'Z', 1)
45 |
46 | if isinstance(o, date):
47 | return o.isoformat()
48 |
49 | # anything else (Decimal, timedelta, etc.)
50 | return str(o)
51 |
52 |
53 | def safe_dumps(o, cls=SafeEncoder, **kwargs):
54 | return dumps(o, cls=cls, **kwargs)
55 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # OSX useful to ignore
7 | *.DS_Store
8 | .AppleDouble
9 | .LSOverride
10 |
11 | # C extensions
12 | *.so
13 |
14 | # Distribution / packaging
15 | .Python
16 | build/
17 | develop-eggs/
18 | dist/
19 | downloads/
20 | eggs/
21 | .eggs/
22 | lib/
23 | lib64/
24 | parts/
25 | sdist/
26 | var/
27 | wheels/
28 | share/python-wheels/
29 | *.egg-info/
30 | .installed.cfg
31 | *.egg
32 | MANIFEST
33 |
34 | # PyInstaller
35 | # Usually these files are written by a python script from a template
36 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
37 | *.manifest
38 | *.spec
39 |
40 | # Installer logs
41 | pip-log.txt
42 | pip-delete-this-directory.txt
43 |
44 | # Unit test / coverage reports
45 | htmlcov/
46 | .tox/
47 | .nox/
48 | .coverage
49 | .coverage.*
50 | .cache
51 | nosetests.xml
52 | coverage.xml
53 | *.cover
54 | *.py,cover
55 | .hypothesis/
56 | .pytest_cache/
57 | cover/
58 |
59 | # Translations
60 | *.mo
61 | *.pot
62 |
63 | # Django stuff:
64 | *.log
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # IntelliJ Idea family of suites
70 | .idea
71 | *.iml
72 | ## File-based project format:
73 | *.ipr
74 | *.iws
75 | ## mpeltonen/sbt-idea plugin
76 | .idea_modules/
77 |
78 | # IDE settings
79 | .vscode/
80 |
81 | # PyBuilder
82 | target/
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | Pipfile.lock
89 |
90 | # Environments
91 | .env/
92 | .venv
93 | env/
94 | venv/
95 | ENV/
96 | env.bak/
97 | venv.bak/
98 |
99 | # File created by pytest
100 | testing.json
101 |
--------------------------------------------------------------------------------
/docs/installation.rst:
--------------------------------------------------------------------------------
1 | .. highlight:: shell
2 |
3 | ============
4 | Installation
5 | ============
6 |
7 |
8 | Stable release
9 | --------------
10 |
11 | To install Dataclass Wizard, run this command in your terminal:
12 |
13 | .. code-block:: console
14 |
15 | $ pip install dataclass-wizard
16 |
17 | This is the preferred method to install Dataclass Wizard, as it will always install the most recent stable release.
18 |
19 | If you don't have `pip`_ installed, this `Python installation guide`_ can guide
20 | you through the process.
21 |
22 | Dataclass Wizard is also available `on conda`_ under the `conda-forge`_ channel:
23 |
24 | .. code-block:: console
25 |
26 | $ conda install dataclass-wizard -c conda-forge
27 |
28 | .. _pip: https://pip.pypa.io
29 | .. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/
30 | .. _conda: https://www.anaconda.com/
31 | .. _conda-forge: https://conda-forge.org/
32 | .. _on conda: https://anaconda.org/conda-forge/dataclass-wizard
33 |
34 | From sources
35 | ------------
36 |
37 | The sources for Dataclass Wizard can be downloaded from the `Github repo`_.
38 |
39 | You can either clone the public repository:
40 |
41 | .. code-block:: console
42 |
43 | $ git clone git://github.com/rnag/dataclass-wizard
44 |
45 | Or download the `tarball`_:
46 |
47 | .. code-block:: console
48 |
49 | $ curl -OJL https://github.com/rnag/dataclass-wizard/tarball/main
50 |
51 | Once you have a copy of the source, you can install it with:
52 |
53 | .. code-block:: console
54 |
55 | $ python setup.py install
56 |
57 |
58 | .. _Github repo: https://github.com/rnag/dataclass-wizard
59 | .. _tarball: https://github.com/rnag/dataclass-wizard/tarball/main
60 |
--------------------------------------------------------------------------------
/dataclass_wizard/enums.py:
--------------------------------------------------------------------------------
1 | """
2 | Re-usable Enum definitions
3 |
4 | """
5 | from enum import Enum
6 |
7 | from .environ import lookups
8 | from .utils.string_conv import *
9 | from .utils.wrappers import FuncWrapper
10 |
11 |
12 | class DateTimeTo(Enum):
13 | ISO_FORMAT = 0
14 | TIMESTAMP = 1
15 |
16 |
17 | class LetterCase(Enum):
18 |
19 | # Converts strings (generally in snake case) to camel case.
20 | # ex: `my_field_name` -> `myFieldName`
21 | CAMEL = FuncWrapper(to_camel_case)
22 | # Converts strings to "upper" camel case.
23 | # ex: `my_field_name` -> `MyFieldName`
24 | PASCAL = FuncWrapper(to_pascal_case)
25 | # Converts strings (generally in camel or snake case) to lisp case.
26 | # ex: `myFieldName` -> `my-field-name`
27 | LISP = FuncWrapper(to_lisp_case)
28 | # Converts strings (generally in camel case) to snake case.
29 | # ex: `myFieldName` -> `my_field_name`
30 | SNAKE = FuncWrapper(to_snake_case)
31 | # Performs no conversion on strings.
32 | # ex: `MY_FIELD_NAME` -> `MY_FIELD_NAME`
33 | NONE = FuncWrapper(lambda s: s)
34 |
35 | def __call__(self, *args):
36 | return self.value.f(*args)
37 |
38 |
39 | class LetterCasePriority(Enum):
40 | """
41 | Helper Enum which determines which letter casing we want to
42 | *prioritize* when loading environment variable names.
43 |
44 | The default
45 | """
46 | SCREAMING_SNAKE = FuncWrapper(lookups.with_screaming_snake_case)
47 | SNAKE = FuncWrapper(lookups.with_snake_case)
48 | CAMEL = FuncWrapper(lookups.with_pascal_or_camel_case)
49 | PASCAL = FuncWrapper(lookups.with_pascal_or_camel_case)
50 |
51 | def __call__(self, *args):
52 | return self.value.f(*args)
53 |
--------------------------------------------------------------------------------
/dataclass_wizard/environ/lookups.pyi:
--------------------------------------------------------------------------------
1 | from dataclasses import MISSING
2 | from typing import ClassVar, TypeAlias, Union
3 |
4 | from ..decorators import cached_class_property
5 | from ..type_def import StrCollection, EnvFileType
6 |
7 |
8 | _MISSING_TYPE: TypeAlias = type(MISSING)
9 | STR_OR_MISSING: TypeAlias = Union[str, _MISSING_TYPE]
10 | STR_OR_NONE: TypeAlias = Union[str, None]
11 |
12 | # Type of `os.environ` or `DotEnv` dict
13 | Environ = dict[str, STR_OR_NONE]
14 |
15 | # Type of (unique) environment variable names
16 | EnvVars = set[str]
17 |
18 |
19 | environ: Environ
20 |
21 |
22 | # noinspection PyMethodParameters
23 | class Env:
24 |
25 | __slots__ = ()
26 |
27 | _accessed_cleaned_to_env: ClassVar[bool] = False
28 |
29 | var_names: EnvVars
30 |
31 | @classmethod
32 | def load_environ(cls, force_reload=False) -> None: ...
33 |
34 | @classmethod
35 | def reload(cls, env: dict | None = None): ...
36 |
37 | @classmethod
38 | def secret_values(cls, dirs: EnvFileType) -> Environ: ...
39 |
40 | @classmethod
41 | def update_with_secret_values(cls, dirs: EnvFileType): ...
42 |
43 | @classmethod
44 | def dotenv_values(cls, files: EnvFileType) -> Environ: ...
45 |
46 | @classmethod
47 | def update_with_dotenv(cls, files: EnvFileType = '.env',
48 | dotenv_values=None): ...
49 |
50 | # noinspection PyDunderSlots,PyUnresolvedReferences
51 | @cached_class_property
52 | def cleaned_to_env(cls) -> Environ: ...
53 |
54 |
55 | def clean(s: str) -> str: ...
56 | def try_cleaned(key: str) -> STR_OR_MISSING: ...
57 | def lookup_exact(var: StrCollection) -> STR_OR_MISSING: ...
58 | def with_screaming_snake_case(field_name: str) -> STR_OR_MISSING: ...
59 | def with_snake_case(field_name: str) -> STR_OR_MISSING: ...
60 | def with_pascal_or_camel_case(field_name: str) -> STR_OR_MISSING: ...
61 |
--------------------------------------------------------------------------------
/.github/workflows/dev.yml:
--------------------------------------------------------------------------------
1 | # This is a basic workflow to help you get started with Actions
2 |
3 | name: CI
4 |
5 | # Controls when the action will run.
6 | on:
7 | push:
8 | branches: [ main ]
9 | # Triggers the workflow on pull request events but only for the main branch
10 | pull_request:
11 | branches: [ main ]
12 |
13 | # Allows you to run this workflow manually from the Actions tab
14 | workflow_dispatch:
15 |
16 | concurrency:
17 | group: ci-${{ github.ref }}
18 | cancel-in-progress: true
19 |
20 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel
21 | jobs:
22 | # This workflow contains a single job called "build"
23 | test:
24 | # The type of runner that the job will run on
25 | timeout-minutes: 30
26 | strategy:
27 | matrix:
28 | python-versions: ['3.9', '3.10', '3.11', '3.12', '3.13', '3.14']
29 | os: [ubuntu-latest]
30 | # Uncomment if I need to run it on other environments too (currently
31 | # there's not a huge need)
32 | # os: [ubuntu-20.04, windows-latest, macos-latest]
33 | runs-on: ${{ matrix.os }}
34 |
35 | # Steps represent a sequence of tasks that will be executed as part of the job
36 | steps:
37 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
38 | - name: Checkout
39 | uses: actions/checkout@v4
40 | with:
41 | persist-credentials: false
42 | - name: Setup python
43 | uses: actions/setup-python@v5
44 | with:
45 | python-version: ${{ matrix.python-versions }}
46 |
47 | - name: Install dependencies
48 | run: |
49 | python -m pip install --upgrade pip
50 | pip install tox tox-gh-actions
51 |
52 | - name: test with tox
53 | run: tox
54 |
55 | - name: list files
56 | run: ls -l .
57 |
--------------------------------------------------------------------------------
/dataclass_wizard/constants.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 |
5 | # Package name
6 | PACKAGE_NAME = 'dataclass_wizard'
7 |
8 | # Library Log Level
9 | LOG_LEVEL = os.getenv('WIZARD_LOG_LEVEL', 'ERROR').upper()
10 |
11 | # Current system Python version
12 | _PY_VERSION = sys.version_info[:2]
13 |
14 | # Check if currently running Python 3.10 or higher
15 | PY310_OR_ABOVE = _PY_VERSION >= (3, 10)
16 |
17 | # Check if currently running Python 3.11 or higher
18 | PY311_OR_ABOVE = _PY_VERSION >= (3, 11)
19 |
20 | # Check if currently running Python 3.12 or higher
21 | PY312_OR_ABOVE = _PY_VERSION >= (3, 12)
22 |
23 | # Check if currently running Python 3.13 or higher
24 | PY313_OR_ABOVE = _PY_VERSION >= (3, 13)
25 |
26 | # Check if currently running Python 3.14 or higher
27 | PY314_OR_ABOVE = _PY_VERSION >= (3, 14)
28 |
29 | # The name of the dictionary object that contains `load` hooks for each
30 | # object type. Also used to check if a class is a :class:`BaseLoadHook`
31 | _LOAD_HOOKS = '__LOAD_HOOKS__'
32 |
33 | # The name of the dictionary object that contains `dump` hooks for each
34 | # object type. Also used to check if a class is a :class:`BaseDumpHook`
35 | _DUMP_HOOKS = '__DUMP_HOOKS__'
36 |
37 | # Attribute name that will be defined for single-arg alias functions and
38 | # methods; mainly for internal use.
39 | SINGLE_ARG_ALIAS = '__SINGLE_ARG_ALIAS__'
40 |
41 | # Attribute name that will be defined for identity functions and methods;
42 | # mainly for internal use.
43 | IDENTITY = '__IDENTITY__'
44 |
45 | # The dictionary key that identifies the tag field for a class. This is only
46 | # set when the `tag` field or the `auto_assign_tags` flag is enabled in the
47 | # `Meta` config for a dataclass.
48 | #
49 | # Note that this key can also be customized in the `Meta` config for a class,
50 | # via the :attr:`tag_key` field.
51 | TAG = '__tag__'
52 |
53 |
54 | # INTERNAL USE ONLY: The dictionary key that the library
55 | # sets/uses to identify a "catch all" field, which captures
56 | # JSON key/values that don't map to any known dataclass fields.
57 | CATCH_ALL = '<-|CatchAll|->'
58 |
--------------------------------------------------------------------------------
/tests/unit/test_hooks.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import pytest
4 |
5 | from dataclasses import dataclass
6 | from ipaddress import IPv4Address
7 |
8 | from dataclass_wizard import JSONWizard, LoadMeta
9 | from dataclass_wizard.errors import ParseError
10 | from dataclass_wizard import DumpMixin, LoadMixin
11 |
12 |
13 | def test_register_type_ipv4address_roundtrip():
14 |
15 | @dataclass
16 | class Foo(JSONWizard):
17 | s: str | None = None
18 | c: IPv4Address | None = None
19 |
20 | Foo.register_type(IPv4Address)
21 |
22 | data = {"c": "127.0.0.1", "s": "foobar"}
23 |
24 | foo = Foo.from_dict(data)
25 | assert foo.c == IPv4Address("127.0.0.1")
26 |
27 | assert foo.to_dict() == data
28 | assert Foo.from_dict(foo.to_dict()).to_dict() == data
29 |
30 |
31 | def test_ipv4address_without_hook_raises_parse_error():
32 |
33 | @dataclass
34 | class Foo(JSONWizard):
35 | c: IPv4Address | None = None
36 |
37 | data = {"c": "127.0.0.1"}
38 |
39 | with pytest.raises(ParseError) as e:
40 | Foo.from_dict(data)
41 |
42 | assert e.value.phase == 'load'
43 |
44 | msg = str(e.value)
45 | # assert "field `c`" in msg
46 | assert "not currently supported" in msg
47 | assert "IPv4Address" in msg
48 | assert "load" in msg.lower()
49 |
50 |
51 | def test_ipv4address_hooks_with_load_and_dump_mixins_roundtrip():
52 | @dataclass
53 | class Foo(JSONWizard, DumpMixin, LoadMixin):
54 | c: IPv4Address | None = None
55 |
56 | @classmethod
57 | def load_to_ipv4_address(cls, o, *_):
58 | return IPv4Address(o)
59 |
60 | @classmethod
61 | def dump_from_ipv4_address(cls, o, *_):
62 | return str(o)
63 |
64 | Foo.register_load_hook(IPv4Address, Foo.load_to_ipv4_address)
65 | Foo.register_dump_hook(IPv4Address, Foo.dump_from_ipv4_address)
66 |
67 | data = {"c": "127.0.0.1"}
68 |
69 | foo = Foo.from_dict(data)
70 | assert foo.c == IPv4Address("127.0.0.1")
71 |
72 | assert foo.to_dict() == data
73 | assert Foo.from_dict(foo.to_dict()).to_dict() == data
74 |
--------------------------------------------------------------------------------
/tests/unit/test_models.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from pytest_mock import MockerFixture
3 |
4 | from dataclass_wizard import fromlist
5 | from dataclass_wizard.models import Container, json_field
6 | from .conftest import SampleClass
7 |
8 |
9 | @pytest.fixture
10 | def mock_open(mocker: MockerFixture):
11 | return mocker.patch('dataclass_wizard.models.open')
12 |
13 |
14 | def test_json_field_does_not_allow_both_default_and_default_factory():
15 | """
16 | Confirm we can't specify both `default` and `default_factory` when
17 | calling the :func:`json_field` helper function.
18 | """
19 | with pytest.raises(ValueError):
20 | _ = json_field((), default=None, default_factory=None)
21 |
22 |
23 | def test_container_with_incorrect_usage():
24 | """Confirm an error is raised when wrongly instantiating a Container."""
25 | c = Container()
26 |
27 | with pytest.raises(TypeError) as exc_info:
28 | _ = c.to_json()
29 |
30 | err_msg = exc_info.exconly()
31 | assert 'A Container object needs to be instantiated ' \
32 | 'with a generic type T' in err_msg
33 |
34 |
35 | def test_container_methods(mocker: MockerFixture, mock_open):
36 | list_of_dict = [{'f1': 'hello', 'f2': 1},
37 | {'f1': 'world', 'f2': 2}]
38 |
39 | list_of_a = fromlist(SampleClass, list_of_dict)
40 |
41 | c = Container[SampleClass](list_of_a)
42 |
43 |     # The repr() is very short, so it should fit on one line and thus
44 |     # match the output of `pprint.pformat`.
45 | assert str(c) == repr(c)
46 |
47 | assert c.prettify() == """\
48 | [
49 | {
50 | "f1": "hello",
51 | "f2": 1
52 | },
53 | {
54 | "f1": "world",
55 | "f2": 2
56 | }
57 | ]"""
58 |
59 | assert c.to_json() == '[{"f1": "hello", "f2": 1}, {"f1": "world", "f2": 2}]'
60 |
61 | mock_open.assert_not_called()
62 | mock_encoder = mocker.Mock()
63 |
64 | filename = 'my_file.json'
65 | c.to_json_file(filename, encoder=mock_encoder)
66 |
67 | mock_open.assert_called_once_with(filename, 'w')
68 | mock_encoder.assert_called_once_with(list_of_dict, mocker.ANY)
69 |
--------------------------------------------------------------------------------
/tests/unit/environ/test_lookups.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from dataclass_wizard.environ.lookups import *
4 |
5 |
6 | @pytest.mark.parametrize(
7 | 'string,expected',
8 | [
9 | ('device_type', 'devicetype'),
10 | ('isACamelCasedWORD', 'isacamelcasedword'),
11 | ('ATitledWordToTESTWith', 'atitledwordtotestwith'),
12 | ('not-a-tester', 'notatester'),
13 | ('helloworld', 'helloworld'),
14 | ('A', 'a'),
15 | ('TESTing_if_thisWorks', 'testingifthisworks'),
16 | ('a_B_Cde_fG_hi', 'abcdefghi'),
17 | ('How_-Are-_YoUDoing__TeST', 'howareyoudoingtest'),
18 | ]
19 | )
20 | def test_clean(string, expected):
21 | assert clean(string) == expected
22 |
23 |
24 | def test_lookup_exact():
25 | assert lookup_exact('abc-this-key-shouldnt-exist') is MISSING
26 | assert lookup_exact(('abc-this-key-shouldnt-exist', )) is MISSING
27 |
28 |
29 | def test_reload_when_not_accessed_cleaned_to_env():
30 | # save current value
31 | current_val = Env._accessed_cleaned_to_env
32 |
33 | Env._accessed_cleaned_to_env = False
34 | Env.reload()
35 |
36 | # don't forget to reset it
37 | Env._accessed_cleaned_to_env = current_val
38 |
39 |
40 | def test_with_snake_case():
41 | var = 'my_test_string_1'
42 | assert with_snake_case(var) is MISSING
43 |
44 | os.environ['MY_TEST_STRING_1'] = 'hello world'
45 | Env.reload()
46 | assert with_snake_case(var) == 'hello world'
47 |
48 | os.environ[var] = 'testing 123'
49 | Env.reload()
50 | assert with_snake_case(var) == 'testing 123'
51 |
52 |
53 | def test_with_pascal_or_camel_case():
54 | var = 'MyTestString2'
55 | assert with_pascal_or_camel_case(var) is MISSING
56 |
57 | os.environ['my_test_string2'] = 'testing 123'
58 | Env.reload()
59 | assert with_pascal_or_camel_case(var) == 'testing 123'
60 |
61 | os.environ['MY_TEST_STRING2'] = 'hello world'
62 | Env.reload()
63 | assert with_pascal_or_camel_case(var) == 'hello world'
64 |
65 | if os.name == 'nt':
66 | # Windows: var names are automatically converted
67 | # to upper case when saved to `os.environ`
68 | return
69 |
70 | os.environ[var] = 'hello world !!'
71 | Env.reload()
72 | assert with_pascal_or_camel_case(var) == 'hello world !!'
73 |
--------------------------------------------------------------------------------
/dataclass_wizard/utils/dataclass_compat.py:
--------------------------------------------------------------------------------
1 | """
2 | Pulls functions that were removed from the ``dataclasses`` module in recent versions of Python into this module, for continued compatibility.
3 | All function names and bodies are left exactly as they were prior to being removed.
4 | """
5 |
6 | from dataclasses import MISSING
7 | from types import FunctionType
8 |
9 |
10 | def _set_qualname(cls, value):
11 | # Removed in Python 3.13
12 | # Original: `dataclasses._set_qualname`
13 | # Ensure that the functions returned from _create_fn uses the proper
14 | # __qualname__ (the class they belong to).
15 | if isinstance(value, FunctionType):
16 | value.__qualname__ = f"{cls.__qualname__}.{value.__name__}"
17 | return value
18 |
19 |
20 | def _set_new_attribute(cls, name, value):
21 | # Removed in Python 3.13
22 | # Original: `dataclasses._set_new_attribute`
23 | # Never overwrites an existing attribute. Returns True if the
24 | # attribute already exists.
25 | if name in cls.__dict__:
26 | return True
27 | _set_qualname(cls, value)
28 | setattr(cls, name, value)
29 | return False
30 |
31 |
32 | def _create_fn(name, args, body, *, globals=None, locals=None,
33 | return_type=MISSING):
34 | # Removed in Python 3.13
35 | # Original: `dataclasses._create_fn`
36 | # Note that we may mutate locals. Callers beware!
37 | # The only callers are internal to this module, so no
38 | # worries about external callers.
39 | if locals is None:
40 | locals = {}
41 | return_annotation = ''
42 | if return_type is not MISSING:
43 | locals['__dataclass_return_type__'] = return_type
44 | return_annotation = '->__dataclass_return_type__'
45 | args = ','.join(args)
46 |     body = '\n'.join(f'  {b}' for b in body)
47 |
48 | # Compute the text of the entire function.
49 | txt = f' def {name}({args}){return_annotation}:\n{body}'
50 |
51 | # Free variables in exec are resolved in the global namespace.
52 | # The global namespace we have is user-provided, so we can't modify it for
53 | # our purposes. So we put the things we need into locals and introduce a
54 | # scope to allow the function we're creating to close over them.
55 | local_vars = ', '.join(locals.keys())
56 | txt = f"def __create_fn__({local_vars}):\n{txt}\n return {name}"
57 | ns = {}
58 | exec(txt, globals, ns)
59 | return ns['__create_fn__'](**locals)
60 |
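# A minimal sketch (not part of the original module) showing how `_create_fn`
# can be exercised directly, assuming the same calling convention the stdlib
# `dataclasses` module used internally: a function name, a list of argument
# strings, and a list of body lines.
if __name__ == '__main__':
    add = _create_fn('add', ['a', 'b'], ['return a + b'])
    assert add(2, 3) == 5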
--------------------------------------------------------------------------------
/tests/testdata/test4.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "input_index": 0,
4 | "candidate_index": 0,
5 | "delivery_line_1": "1 N Rosedale St",
6 | "last_line": "Baltimore MD 21229-3737",
7 | "delivery_point_barcode": "212293737013",
8 | "components": {
9 | "primary_number": "1",
10 | "street_predirection": "N",
11 | "street_name": "Rosedale",
12 | "street_suffix": "St",
13 | "city_name": "Baltimore",
14 | "state_abbreviation": "MD",
15 | "zipcode": "21229",
16 | "plus4_code": "3737",
17 | "delivery_point": "01",
18 | "delivery_point_check_digit": "3"
19 | },
20 | "metadata": {
21 | "record_type": "S",
22 | "zip_type": "Standard",
23 | "county_fips": "24510",
24 | "county_name": "Baltimore City",
25 | "carrier_route": "C047",
26 | "congressional_district": "07",
27 | "rdi": "Residential",
28 | "elot_sequence": "0059",
29 | "elot_sort": "A",
30 | "latitude": 39.28602,
31 | "longitude": -76.6689,
32 | "precision": "Zip9",
33 | "time_zone": "Eastern",
34 | "utc_offset": -5,
35 | "dst": true
36 | },
37 | "analysis": {
38 | "dpv_match_code": "Y",
39 | "dpv_footnotes": "AABB",
40 | "dpv_cmra": "N",
41 | "dpv_vacant": "N",
42 | "active": "Y"
43 | }
44 | },
45 | {
46 | "input_index": 0,
47 | "candidate_index": 1,
48 | "delivery_line_1": "1 S Rosedale St",
49 | "last_line": "Baltimore MD 21229-3739",
50 | "delivery_point_barcode": "212293739011",
51 | "components": {
52 | "primary_number": "1",
53 | "street_predirection": "S",
54 | "street_name": "Rosedale",
55 | "street_suffix": "St",
56 | "city_name": "Baltimore",
57 | "state_abbreviation": "MD",
58 | "zipcode": "21229",
59 | "plus4_code": "3739",
60 | "delivery_point": "01",
61 | "delivery_point_check_digit": "1"
62 | },
63 | "metadata": {
64 | "record_type": "S",
65 | "zip_type": "Standard",
66 | "county_fips": "24510",
67 | "county_name": "Baltimore City",
68 | "carrier_route": "C047",
69 | "congressional_district": "07",
70 | "rdi": "Residential",
71 | "elot_sequence": "0064",
72 | "elot_sort": "A",
73 | "latitude": 39.2858,
74 | "longitude": -76.66889,
75 | "precision": "Zip9",
76 | "time_zone": "Eastern",
77 | "utc_offset": -5,
78 | "dst": true
79 | },
80 | "analysis": {
81 | "dpv_match_code": "Y",
82 | "dpv_footnotes": "AABB",
83 | "dpv_cmra": "N",
84 | "dpv_vacant": "N",
85 | "active": "Y"
86 | }
87 | }
88 | ]
89 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | # Publish package on main branch if it's tagged with 'v*'
2 | # Ref: https://github.community/t/run-workflow-on-push-tag-on-specific-branch/17519
3 |
4 | name: build & release
5 |
6 | permissions:
7 | contents: read
8 |
9 | # Controls when the action will run.
10 | on:
11 |   # Triggers the workflow on push events for tags matching 'v*'
12 | push:
13 | tags:
14 | - 'v*'
15 |
16 | # Allows you to run this workflow manually from the Actions tab
17 | workflow_dispatch:
18 |
19 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel
20 | jobs:
21 |   # This workflow contains a single job called "release"
22 | release:
23 | name: Create Release
24 | runs-on: ubuntu-latest
25 |
26 | # Steps represent a sequence of tasks that will be executed as part of the job
27 | steps:
28 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
29 | - name: Checkout code
30 | uses: actions/checkout@v4
31 | # with:
32 | # fetch-depth: 0
33 |
34 | - name: Fail if tag is not on main
35 | run: |
36 | git fetch origin main
37 | git merge-base --is-ancestor "$GITHUB_SHA" "origin/main"
38 |
39 | # Temporarily disable this - I want it to trigger on merge, but it doesn't
40 | # work (at least not on a tagged commit too)
41 | # - name: Exit if not on main branch
42 | # if: endsWith(github.ref, 'main') == false
43 | # run: exit -1
44 |
45 | - name: Set up Python
46 | uses: actions/setup-python@v5
47 | with:
48 | python-version: '3.11'
49 |
50 | # - name: Replace version in README
51 | # run: |
52 | # VERSION=$(grep -oP "__version__\s*=\s*'\K[^']+" dataclass_wizard/__version__.py)
53 | # echo "Extracted version: $VERSION"
54 | # sed -i "s/|version|/$VERSION/g" README.rst
55 |
56 | - name: Build wheels and source tarball
57 | run: |
58 | make dist
59 |
60 | - name: Check dist metadata
61 | run: |
62 | python -m pip install --upgrade pip
63 | python -m pip install twine
64 | twine check dist/*
65 |
66 | - name: Publish to PyPI
67 | uses: pypa/gh-action-pypi-publish@release/v1
68 | with:
69 | user: __token__
70 | password: ${{ secrets.PYPI_API_TOKEN }}
71 | skip_existing: true
72 |
--------------------------------------------------------------------------------
/dataclass_wizard/utils/lazy_loader.py:
--------------------------------------------------------------------------------
1 | """
2 | Utility for lazy loading Python modules.
3 |
4 | Credits: https://wil.yegelwel.com/lazily-importing-python-modules/
5 | """
6 | import importlib
7 | import logging
8 | import types
9 |
10 |
11 | class LazyLoader(types.ModuleType):
12 | """
13 | Lazily import a module, mainly to avoid pulling in large dependencies.
14 |     `contrib` and `ffmpeg` are examples of modules that are large and not always
15 | needed, and this allows them to only be loaded when they are used.
16 | """
17 |
18 | def __init__(self, parent_module_globals, name,
19 | extra=None, local_name=None, warning=None):
20 |
21 | self._local_name = local_name or name
22 | self._parent_module_globals = parent_module_globals
23 | self._extra = extra
24 | self._warning = warning
25 |
26 | super(LazyLoader, self).__init__(name)
27 |
28 | def _load(self):
29 | """Load the module and insert it into the parent's globals."""
30 |
31 | # Import the target module and insert it into the parent's namespace
32 | try:
33 | module = importlib.import_module(self.__name__)
34 |
35 | except ModuleNotFoundError:
36 | # The lazy-loaded module is not currently installed.
37 | msg = f'Unable to import the module `{self._local_name}`'
38 |
39 | if self._extra:
40 | from ..__version__ import __title__
41 | msg = f'{msg}. Please run the following command to resolve the issue:\n' \
42 | f' $ pip install {__title__}[{self._extra}]'
43 |
44 | raise ImportError(msg) from None
45 |
46 | self._parent_module_globals[self._local_name] = module
47 |
48 | # Emit a warning if one was specified
49 | if self._warning:
50 | logging.warning(self._warning)
51 | # Make sure to only warn once.
52 | self._warning = None
53 |
54 | # Update this object's dict so that if someone keeps a reference to the
55 | # LazyLoader, lookups are efficient (__getattr__ is only called on lookups
56 | # that fail).
57 | self.__dict__.update(module.__dict__)
58 |
59 | return module
60 |
61 | def __getattr__(self, item):
62 | module = self._load()
63 | return getattr(module, item)
64 |
65 | def __dir__(self):
66 | module = self._load()
67 | return dir(module)
68 |
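# A minimal usage sketch (not part of the original module). The stdlib `json`
# module is used here purely for illustration; real callers would pass a heavy
# or optional dependency, plus the pip "extra" used in the error message above.
if __name__ == '__main__':
    json = LazyLoader(globals(), 'json', local_name='json')
    # Nothing has been imported yet; the first attribute access calls `_load()`,
    # which imports the module and replaces this entry in the parent's globals.
    print(json.dumps({'lazy': True}))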
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | __all__ = [
2 | 'snake',
3 | 'does_not_raise',
4 | 'data_file_path',
5 | 'PY310_OR_ABOVE',
6 | 'PY311_OR_ABOVE',
7 | 'PY312_OR_ABOVE',
8 | 'TypedDict',
9 | # For compatibility with Python 3.9 and 3.10
10 | 'Required',
11 | 'NotRequired',
12 | 'LiteralString',
13 | ]
14 |
15 | import sys
16 | # Ref: https://docs.pytest.org/en/6.2.x/example/parametrize.html#parametrizing-conditional-raising
17 | from contextlib import nullcontext as does_not_raise
18 | from pathlib import Path
19 |
20 | from dataclass_wizard.utils.string_conv import to_snake_case
21 |
22 |
23 | # Directory for test files
24 | TEST_DATA_DIR = Path(__file__).resolve().parent / 'testdata'
25 |
26 | # Check if we are running Python 3.10+
27 | PY310_OR_ABOVE = sys.version_info[:2] >= (3, 10)
28 |
29 | # Check if we are running Python 3.11+
30 | PY311_OR_ABOVE = sys.version_info[:2] >= (3, 11)
31 |
32 | # Check if we are running Python 3.12+
33 | PY312_OR_ABOVE = sys.version_info[:2] >= (3, 12)
34 |
35 | # Check if we are running Python 3.9 or 3.10
36 | PY310_OR_EARLIER = not PY311_OR_ABOVE
37 |
38 | # Oddly, test cases for `TypedDict` fail in Python 3.9 and 3.10.15 (3.10:latest),
39 | # so we need to use the `TypedDict` from `typing_extensions` instead.
40 | if PY310_OR_EARLIER:
41 | from typing_extensions import TypedDict
42 | else:
43 | from typing import TypedDict
44 |
45 | # typing.Required and typing.NotRequired: Introduced in Python 3.11
46 | if PY311_OR_ABOVE:
47 | from typing import Required
48 | from typing import NotRequired
49 | from typing import LiteralString
50 | else:
51 | from typing_extensions import Required
52 | from typing_extensions import NotRequired
53 | from typing_extensions import LiteralString
54 |
55 |
56 | # Ignore test files if the Python version is below 3.12
57 | if not PY312_OR_ABOVE:
58 |     print("Python version is below 3.12. Ignoring tests that require Python 3.12+.")
59 | collect_ignore = [
60 | Path('unit', 'v1', 'test_union_as_type_alias_recursive.py').as_posix(),
61 | ]
62 |
63 | def data_file_path(name: str) -> str:
64 | """Returns the full path to a test file."""
65 | return str((TEST_DATA_DIR / name).absolute())
66 |
67 |
68 | def snake(d):
69 | """
70 | Helper function to snake-case all keys in a dictionary `d`.
71 |
72 | Useful for `v1`, which by default requires a 1:1 mapping of
73 | JSON key to dataclass field.
74 | """
75 | return {to_snake_case(k): v for k, v in d.items()}
76 |
--------------------------------------------------------------------------------
/dataclass_wizard/environ/wizard.pyi:
--------------------------------------------------------------------------------
1 | import json
2 | from dataclasses import Field
3 | from typing import AnyStr, dataclass_transform, Collection, Sequence
4 |
5 | from ..abstractions import AbstractEnvWizard, E
6 | from ..bases_meta import BaseEnvWizardMeta
7 | from ..enums import LetterCase
8 | from ..errors import ParseError
9 | from ..type_def import JSONObject, Encoder, EnvFileType
10 |
11 |
12 | @dataclass_transform(kw_only_default=True)
13 | class EnvWizard(AbstractEnvWizard):
14 | __slots__ = ()
15 |
16 | class Meta(BaseEnvWizardMeta):
17 |
18 | __slots__ = ()
19 |
20 | # Class attribute to enable detection of the class type.
21 | __is_inner_meta__ = True
22 |
23 | def __init_subclass__(cls):
24 | # Set the `__init_subclass__` method here, so we can ensure it
25 | # doesn't run for the `EnvWizard.Meta` class.
26 | return cls._init_subclass()
27 |
28 | __fields__: dict[str, Field]
29 |
30 | def to_dict(self: E,
31 | *,
32 | dict_factory=dict,
33 | exclude: Collection[str] | None = None,
34 | skip_defaults: bool | None = None,
35 | ) -> JSONObject: ...
36 |
37 | def to_json(self: E, *,
38 | encoder: Encoder = json.dumps,
39 | **encoder_kwargs) -> AnyStr: ...
40 |
41 | # stub for type hinting purposes.
42 | def __init__(self, *,
43 | _env_file: EnvFileType = None,
44 | _reload: bool = False,
45 |                  _env_prefix: str | None = None,
46 |                  _secrets_dir: EnvFileType | Sequence[EnvFileType] = None,
47 | **init_kwargs) -> None: ...
48 |
49 | def __init_subclass__(cls, *, reload_env: bool = False,
50 | debug: bool = False,
51 | key_transform=LetterCase.NONE): ...
52 |
53 | @classmethod
54 | def _create_methods(cls) -> None: ...
55 |
56 |
57 | def _add_missing_var(missing_vars: list,
58 | name: str,
59 | env_prefix: str | None,
60 | var_name: str | None,
61 | tp: type) -> None: ...
62 |
63 |
64 | def _handle_parse_error(e: ParseError,
65 | cls: type,
66 | name: str,
67 | env_prefix: str | None,
68 | var_name: str | None): ...
69 |
70 | def _get_var_name(name: str,
71 | env_prefix: str | None,
72 | var_name: str | None) -> str: ...
73 |
--------------------------------------------------------------------------------
/docs/dataclass_wizard.utils.rst:
--------------------------------------------------------------------------------
1 | dataclass\_wizard.utils package
2 | ===============================
3 |
4 | Submodules
5 | ----------
6 |
7 | dataclass\_wizard.utils.dataclass\_compat module
8 | ------------------------------------------------
9 |
10 | .. automodule:: dataclass_wizard.utils.dataclass_compat
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | dataclass\_wizard.utils.dict\_helper module
16 | -------------------------------------------
17 |
18 | .. automodule:: dataclass_wizard.utils.dict_helper
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | dataclass\_wizard.utils.function\_builder module
24 | ------------------------------------------------
25 |
26 | .. automodule:: dataclass_wizard.utils.function_builder
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | dataclass\_wizard.utils.json\_util module
32 | -----------------------------------------
33 |
34 | .. automodule:: dataclass_wizard.utils.json_util
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 | dataclass\_wizard.utils.lazy\_loader module
40 | -------------------------------------------
41 |
42 | .. automodule:: dataclass_wizard.utils.lazy_loader
43 | :members:
44 | :undoc-members:
45 | :show-inheritance:
46 |
47 | dataclass\_wizard.utils.object\_path module
48 | -------------------------------------------
49 |
50 | .. automodule:: dataclass_wizard.utils.object_path
51 | :members:
52 | :undoc-members:
53 | :show-inheritance:
54 |
55 | dataclass\_wizard.utils.string\_conv module
56 | -------------------------------------------
57 |
58 | .. automodule:: dataclass_wizard.utils.string_conv
59 | :members:
60 | :undoc-members:
61 | :show-inheritance:
62 |
63 | dataclass\_wizard.utils.type\_conv module
64 | -----------------------------------------
65 |
66 | .. automodule:: dataclass_wizard.utils.type_conv
67 | :members:
68 | :undoc-members:
69 | :show-inheritance:
70 |
71 | dataclass\_wizard.utils.typing\_compat module
72 | ---------------------------------------------
73 |
74 | .. automodule:: dataclass_wizard.utils.typing_compat
75 | :members:
76 | :undoc-members:
77 | :show-inheritance:
78 |
79 | dataclass\_wizard.utils.wrappers module
80 | ---------------------------------------
81 |
82 | .. automodule:: dataclass_wizard.utils.wrappers
83 | :members:
84 | :undoc-members:
85 | :show-inheritance:
86 |
87 | Module contents
88 | ---------------
89 |
90 | .. automodule:: dataclass_wizard.utils
91 | :members:
92 | :undoc-members:
93 | :show-inheritance:
94 |
--------------------------------------------------------------------------------
/docs/advanced_usage/serializer_hooks.rst:
--------------------------------------------------------------------------------
1 | Serializer Hooks
2 | ================
3 |
4 | .. note::
5 | To customize the load or dump process for annotated types
6 | instead of individual fields, please see the `Type
7 | Hooks `__ section.
8 |
9 | You can optionally add hooks that are run before a JSON string or a
10 | Python ``dict`` object is loaded to a dataclass instance, or before the
11 | dataclass instance is converted back to a Python ``dict`` object.
12 |
13 | To customize the load process:
14 |
15 | * To pre-process data before ``from_dict`` is called, simply
16 | implement a ``_pre_from_dict`` method which will be called
17 | whenever you invoke the ``from_dict`` or ``from_json`` methods.
18 | Please note that this will pass in the original ``dict`` object,
19 | so updating any values will affect data in the underlying ``dict``
20 | (**this might change in a future revision**).
21 | * To post-process data, *after* a dataclass instance is de-serialized,
22 | simply implement the ``__post_init__`` method which will be run
23 | by the ``dataclass`` decorator.
24 |
25 | To customize the dump process, simply implement
26 | a ``_pre_dict`` method which will be called
27 | whenever you invoke the ``to_dict`` or ``to_json``
28 | methods. Please note that this will pass in the
29 | original dataclass instance, so updating any values
30 | will affect the fields of the underlying dataclass
31 | (**this might change in a future revision**).
32 |
33 | A simple example to illustrate both approaches is shown below:
34 |
35 | .. code:: python3
36 |
37 | from dataclasses import dataclass
38 | from dataclass_wizard import JSONWizard
39 | from dataclass_wizard.type_def import JSONObject
40 |
41 |
42 | @dataclass
43 | class MyClass(JSONWizard):
44 | my_str: str
45 | my_int: int
46 | my_bool: bool = False
47 |
48 | def __post_init__(self):
49 | self.my_str = self.my_str.title()
50 | self.my_int *= 2
51 |
52 | @classmethod
53 | def _pre_from_dict(cls, o: JSONObject) -> JSONObject:
54 | # o = o.copy() # Copying the `dict` object is optional
55 | o['my_bool'] = True # Adds a new key/value pair
56 | return o
57 |
58 | def _pre_dict(self):
59 | self.my_str = self.my_str.swapcase()
60 |
61 |
62 | data = {"my_str": "my string", "myInt": "10"}
63 |
64 | c = MyClass.from_dict(data)
65 | print(repr(c))
66 | # prints:
67 | # MyClass(my_str='My String', my_int=20, my_bool=True)
68 |
69 | string = c.to_json()
70 | print(string)
71 | # prints:
72 | # {"myStr": "mY sTRING", "myInt": 20, "myBool": true}
73 |
--------------------------------------------------------------------------------
/docs/quickstart.rst:
--------------------------------------------------------------------------------
1 | ==========
2 | Quickstart
3 | ==========
4 |
5 | Here are the supported features that Dataclass Wizard currently provides:
6 |
7 | - *JSON (de)serialization*: marshal dataclasses to/from JSON and Python
8 | ``dict`` objects.
9 | - *Field properties*: support for using properties with default
10 | values in dataclass instances.
11 |
12 | Below is a quick demo of both of these features - how to marshal dataclasses to/from JSON and Python ``dict`` objects,
13 | and how to declare and use field properties with default values.
14 |
15 |
16 | .. code:: python3
17 |
18 | from dataclasses import dataclass, field
19 | from datetime import datetime
20 | from typing import Optional
21 |
22 | from dataclass_wizard import JSONSerializable, property_wizard
23 |
24 |
25 | @dataclass
26 | class MyClass(JSONSerializable, metaclass=property_wizard):
27 |
28 | my_str: Optional[str]
29 | list_of_int: list[int] = field(default_factory=list)
30 | # You can also define this as `my_dt`, however only the annotation
31 | # will carry over in that case, since the value is re-declared by
32 | # the property below. See also the 'Using Field Properties' section
33 | # in the docs for a more elegant approach.
34 | _my_dt: datetime = datetime(2000, 1, 1)
35 |
36 | @property
37 | def my_dt(self):
38 | """
39 | A sample `getter` which returns the datetime with year set as 2010
40 | """
41 | if self._my_dt is not None:
42 | return self._my_dt.replace(year=2010)
43 | return self._my_dt
44 |
45 | @my_dt.setter
46 | def my_dt(self, new_dt: datetime):
47 | """
48 | A sample `setter` which sets the inverse (roughly) of the `month` and `day`
49 | """
50 | self._my_dt = new_dt.replace(
51 | month=13 - new_dt.month,
52 | day=31 - new_dt.day)
53 |
54 |
55 | string = '''{"myStr": 42, "listOFInt": [1, "2", 3]}'''
56 | # Uses the default value for `my_dt`, with year=2000, month=1, day=1
57 | c = MyClass.from_json(string)
58 |
59 | print(repr(c))
60 | # prints:
61 | # MyClass(my_str='42', list_of_int=[1, 2, 3], my_dt=datetime.datetime(2010, 12, 30, 0, 0))
62 |
63 | my_dict = {'My_Str': 'string', 'myDT': '2021-01-20T15:55:30Z'}
64 | c = MyClass.from_dict(my_dict)
65 |
66 | print(repr(c))
67 | # prints:
68 | # MyClass(my_str='string', list_of_int=[], my_dt=datetime.datetime(2010, 12, 11, 15, 55, 30, tzinfo=datetime.timezone.utc))
69 |
70 | print(c.to_json())
71 | # prints:
72 | # {"myStr": "string", "listOfInt": [], "myDt": "2010-12-11T15:55:30Z"}
73 |
--------------------------------------------------------------------------------
/dataclass_wizard/v1/enums.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 |
3 | from ..utils.string_conv import (to_camel_case,
4 | to_lisp_case,
5 | to_pascal_case,
6 | to_snake_case)
7 | from ..utils.wrappers import FuncWrapper
8 |
9 |
10 | class KeyAction(Enum):
11 | """
12 | Specifies how to handle unknown keys encountered during deserialization.
13 |
14 | Actions:
15 | - `IGNORE`: Skip unknown keys silently.
16 | - `RAISE`: Raise an exception upon encountering the first unknown key.
17 | - `WARN`: Log a warning for each unknown key.
18 |
19 | For capturing unknown keys (e.g., including them in a dataclass), use the `CatchAll` field.
20 | More details: https://dcw.ritviknag.com/en/latest/common_use_cases/handling_unknown_json_keys.html#capturing-unknown-keys-with-catchall
21 | """
22 | IGNORE = 0 # Silently skip unknown keys.
23 | RAISE = 1 # Raise an exception for the first unknown key.
24 | WARN = 2 # Log a warning for each unknown key.
25 | # INCLUDE = 3
26 |
27 |
28 | class KeyCase(Enum):
29 | """
30 | Defines transformations for string keys, commonly used for mapping JSON keys to dataclass fields.
31 |
32 | Key transformations:
33 |
34 | - `CAMEL`: Converts snake_case to camelCase.
35 | Example: `my_field_name` -> `myFieldName`
36 | - `PASCAL`: Converts snake_case to PascalCase (UpperCamelCase).
37 | Example: `my_field_name` -> `MyFieldName`
38 | - `KEBAB`: Converts camelCase or snake_case to kebab-case.
39 | Example: `myFieldName` -> `my-field-name`
40 | - `SNAKE`: Converts camelCase to snake_case.
41 | Example: `myFieldName` -> `my_field_name`
42 | - `AUTO`: Automatically maps JSON keys to dataclass fields by
43 | attempting all valid key casing transforms at runtime.
44 | Example: `My-Field-Name` -> `my_field_name` (cached for future lookups)
45 |
46 | By default, no transformation is applied:
47 | * Example: `MY_FIELD_NAME` -> `MY_FIELD_NAME`
48 | """
49 | # Key casing options
50 | CAMEL = C = FuncWrapper(to_camel_case) # Convert to `camelCase`
51 | PASCAL = P = FuncWrapper(to_pascal_case) # Convert to `PascalCase`
52 | KEBAB = K = FuncWrapper(to_lisp_case) # Convert to `kebab-case`
53 | SNAKE = S = FuncWrapper(to_snake_case) # Convert to `snake_case`
54 | AUTO = A = None # Attempt all valid casing transforms at runtime.
55 |
56 | def __call__(self, *args):
57 | """Apply the key transformation."""
58 | return self.value.f(*args)
59 |
60 |
61 | class DateTimeTo(Enum):
62 | ISO = 0 # ISO 8601 string (default)
63 | TIMESTAMP = 1 # Unix timestamp (seconds)
64 |
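# A minimal sketch (not part of the original module) showing that `KeyCase`
# members are callable and simply apply the wrapped string-conversion function.
# This block never runs on import; it assumes the module is executed with
# `python -m dataclass_wizard.v1.enums` in an environment where the package
# is importable.
if __name__ == '__main__':
    assert KeyCase.CAMEL('my_field_name') == 'myFieldName'
    assert KeyCase.KEBAB('myFieldName') == 'my-field-name'
    # `KeyCase.AUTO` wraps no function (its value is None), so it is not
    # meant to be called directly.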
--------------------------------------------------------------------------------
/tests/unit/test_property_wizard_with_future_import.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import logging
4 | from dataclasses import dataclass, field
5 |
6 | from dataclass_wizard import property_wizard
7 |
8 |
9 | log = logging.getLogger(__name__)
10 |
11 |
12 | def test_property_wizard_with_public_property_and_field_with_or():
13 | """
14 | Using `property_wizard` when the dataclass has both a property and field
15 | name *without* a leading underscore, and using the OR ("|") operator,
16 | instead of the `typing.Union` usage.
17 | """
18 | @dataclass
19 | class Vehicle(metaclass=property_wizard):
20 |
21 | # The value of `wheels` here will be ignored, since `wheels` is simply
22 | # re-assigned on the following property definition.
23 | wheels: int | str = 4
24 |
25 | @property
26 | def wheels(self) -> int:
27 | return self._wheels
28 |
29 | @wheels.setter
30 | def wheels(self, wheels: int | str):
31 | self._wheels = int(wheels)
32 |
33 | v = Vehicle()
34 | log.debug(v)
35 | assert v.wheels == 0
36 |
37 | v = Vehicle(wheels=3)
38 | log.debug(v)
39 | assert v.wheels == 3
40 |
41 | v = Vehicle('6')
42 | log.debug(v)
43 | assert v.wheels == 6, 'The constructor should use our setter method'
44 |
45 | v.wheels = '123'
46 | assert v.wheels == 123, 'Expected assignment to use the setter method'
47 |
48 |
49 | def test_property_wizard_with_unresolvable_forward_ref():
50 | """
51 | Using `property_wizard` when the annotated field for a property references
52 | a class or type that is not yet declared.
53 | """
54 | @dataclass
55 | class Car:
56 | spare_tires: int
57 |
58 | class Truck:
59 | ...
60 |
61 | globals().update(locals())
62 |
63 | @dataclass
64 | class Vehicle(metaclass=property_wizard):
65 |
66 | # The value of `cars` here will be ignored, since `cars` is simply
67 | # re-assigned on the following property definition.
68 | cars: list[Car] = field(default_factory=list)
69 | trucks: list[Truck] = field(default_factory=list)
70 |
71 | @property
72 |         def cars(self) -> list[Car]:
73 | return self._cars
74 |
75 | @cars.setter
76 | def cars(self, cars: list[Car]):
77 | self._cars = cars * 2 if cars else cars
78 |
79 |
80 | v = Vehicle()
81 | log.debug(v)
82 | assert not v.cars
83 | # assert v.cars is None
84 |
85 | v = Vehicle([Car(1)])
86 | log.debug(v)
87 | assert v.cars == [Car(1), Car(1)], 'The constructor should use our ' \
88 | 'setter method'
89 |
90 | v.cars = [Car(3)]
91 | assert v.cars == [Car(3), Car(3)], 'Expected assignment to use the ' \
92 | 'setter method'
93 |
--------------------------------------------------------------------------------
/docs/_templates/sidebarintro.html:
--------------------------------------------------------------------------------
37 | Bring Python dataclasses to life — the wizard way!
43 | Useful Links
--------------------------------------------------------------------------------
/run_bench.py:
--------------------------------------------------------------------------------
1 | import glob
2 | import json
3 | import os
4 | import shutil
5 | import subprocess
6 | import matplotlib.pyplot as plt
7 |
8 |
9 | def run_benchmarks():
10 | # Ensure the `.benchmarks` folder exists
11 | os.makedirs(".benchmarks", exist_ok=True)
12 |
13 | # Run pytest benchmarks and save results
14 | print("Running benchmarks...")
15 | result = subprocess.run(
16 | ["pytest", "benchmarks/catch_all.py", "--benchmark-save=benchmark_results"],
17 | capture_output=True,
18 | text=True
19 | )
20 | print(result.stdout)
21 |
22 |
23 | def load_benchmark_results(file_path):
24 | """Load the benchmark results from the provided JSON file."""
25 | with open(file_path, "r") as f:
26 | return json.load(f)
27 |
28 |
29 | def plot_relative_performance(results):
30 | """Plot relative performance for different benchmark groups."""
31 | benchmarks = results["benchmarks"]
32 |
33 | # Extract and format data
34 | names = []
35 | ops = []
36 | for bm in benchmarks:
37 | group = bm.get("group", "")
38 | library = "dataclass-wizard" if "wizard" in bm["name"] else "dataclasses-json"
39 | formatted_name = f"{group} ({library})"
40 | names.append(formatted_name)
41 | ops.append(bm["stats"]["ops"])
42 |
43 | # Calculate relative performance (ratio of each ops to the slowest ops)
44 | baseline = min(ops)
45 | relative_performance = [op / baseline for op in ops]
46 |
47 | # Plot bar chart
48 | plt.figure(figsize=(10, 6))
49 | bars = plt.barh(names, relative_performance, color="skyblue")
50 | plt.xlabel("Performance Relative to Slowest (times faster)")
51 | plt.title("Catch All: Relative Performance of dataclass-wizard vs dataclasses-json")
52 | plt.tight_layout()
53 |
54 | # Add data labels to the bars
55 | for bar, rel_perf in zip(bars, relative_performance):
56 | plt.text(bar.get_width() + 0.1, bar.get_y() + bar.get_height() / 2,
57 | f"{rel_perf:.1f}x", va="center")
58 |
59 | # Save and display the plot
60 | plt.savefig("catch_all.png")
61 | plt.show()
62 |
63 |
64 | def find_latest_benchmark_file():
65 | """Find the most recent benchmark result file."""
66 | benchmark_dir = ".benchmarks"
67 | pattern = os.path.join(benchmark_dir, "**", "*.json")
68 | files = glob.glob(pattern, recursive=True)
69 | if not files:
70 | raise FileNotFoundError("No benchmark files found.")
71 | latest_file = max(files, key=os.path.getctime) # Find the most recently created file
72 | return latest_file
73 |
74 |
75 | if __name__ == "__main__":
76 | # Step 1: Run benchmarks
77 | run_benchmarks()
78 |
79 | # Step 2: Find the latest benchmark results file
80 | benchmark_file = find_latest_benchmark_file()
81 | print(f"Latest benchmark file: {benchmark_file}")
82 |
83 | # Step 3: Load the benchmark results
84 | if os.path.exists(benchmark_file):
85 | results = load_benchmark_results(benchmark_file)
86 |
87 | # Step 4: Plot results
88 | plot_relative_performance(results)
89 |
90 | else:
91 | print(f"Benchmark file not found: {benchmark_file}")
92 |
93 |     # Step 5: Copy the generated image to the docs folder for easy access
94 | shutil.copy("catch_all.png", "docs/")
95 |
--------------------------------------------------------------------------------
/docs/common_use_cases/easier_debug_mode.rst:
--------------------------------------------------------------------------------
1 | Easier Debug Mode
2 | =================
3 |
4 | The ``dataclass-wizard`` library provides a convenient way to enable logging for debugging. While one approach is to enable the ``debug_enabled`` flag in ``JSONWizard.Meta``, this requires proper setup of the ``logging`` module, as shown below:
5 |
6 | .. code:: python3
7 |
8 | import logging
9 | from dataclasses import dataclass
10 | from dataclass_wizard import JSONWizard
11 |
12 | # Manually set logging level
13 | logging.basicConfig(level=logging.DEBUG)
14 |
15 | @dataclass
16 | class MyClass(JSONWizard):
17 | class _(JSONWizard.Meta):
18 | debug_enabled = True
19 |
20 | Simpler Debugging with ``debug``
21 | --------------------------------
22 |
23 | A simpler and more flexible approach is to pass the ``debug`` argument directly when subclassing ``JSONWizard``. This not only sets the ``logging.basicConfig(level=logging.DEBUG)`` automatically, but also lets you customize the log level by passing a value like ``logging.INFO`` or ``logging.DEBUG``:
24 |
25 | .. code:: python3
26 |
27 | import logging
28 | from dataclasses import dataclass
29 | from dataclass_wizard import JSONWizard
30 |
31 | @dataclass
32 | class MyClass(JSONWizard, debug=logging.INFO):
33 | class _(JSONWizard.Meta):
34 | skip_defaults = True
35 | key_transform_with_dump = 'PASCAL'
36 |
37 | my_bool: bool
38 | my_int: int = 2
39 |
40 | @classmethod
41 | def _pre_from_dict(cls, o):
42 | o['myBool'] = True
43 | return o
44 |
45 | # Setting `debug=logging.INFO` automatically configures the logger:
46 | # logging.getLogger('dataclass_wizard').setLevel(logging.INFO)
47 |
48 | c = MyClass.from_dict({'myBool': 'false'})
49 | print(c)
50 | # {
51 | # "MyBool": true
52 | # }
53 |
54 | Key Points
55 | ----------
56 |
57 | 1. **Automatic Logging Setup**:
58 | When ``debug=True`` (or ``debug=logging.DEBUG``, etc.), ``logging.basicConfig(level=logging.DEBUG)`` is automatically configured for the library.
59 |
60 | 2. **Custom Log Levels**:
61 | - Pass a **boolean** (``True``) to enable ``DEBUG`` level logs.
62 | - Pass a **logging level** (e.g., ``logging.INFO``, ``logging.WARNING``) to set a custom log level.
63 | This internally maps to ``JSONWizard.Meta.debug_enabled``, configuring the library’s logger with the specified level.
64 |
65 | 3. **Library Logger**:
66 | The library logger (``dataclass_wizard``) is dynamically set via ``logging.getLogger('dataclass_wizard').setLevel(input_level)`` based on the ``debug`` argument.
67 |
68 | 4. **Convenient Defaults**:
69 | No need to manually configure ``logging.basicConfig`` or adjust log levels outside your class definition.
70 |
71 | Examples of Log Levels
72 | ----------------------
73 |
74 | .. code:: python3
75 |
76 | import logging
77 | from dataclasses import dataclass
78 | from dataclass_wizard import JSONWizard
79 |
80 | @dataclass
81 | class DebugExample(JSONWizard, debug=True):
82 | ... # DEBUG level (default for boolean True)
83 |
84 | @dataclass
85 | class InfoExample(JSONWizard, debug="INFO"):
86 | ... # INFO level
87 |
88 | @dataclass
89 | class WarningExample(JSONWizard, debug=logging.WARNING):
90 | ... # WARNING level
91 |
--------------------------------------------------------------------------------
/docs/common_use_cases/skip_inheritance.rst:
--------------------------------------------------------------------------------
1 | Skip the Class Inheritance
2 | --------------------------
3 |
4 | It is important to note that the main purpose of sub-classing from
5 | ``JSONWizard`` Mixin class is to provide helper methods like :meth:`from_dict`
6 | and :meth:`to_dict`, which makes it much more convenient and easier to load or
7 | dump your data class from and to JSON.
8 |
9 | That is, it's meant to *complement* the usage of the ``dataclass`` decorator,
10 | rather than to serve as a drop-in replacement for data classes, or to provide type
11 | validation for example; there are already excellent libraries like `pydantic`_ that
12 | provide these features if so desired.
13 |
14 | However, there may be use cases where we prefer to do away with the class
15 | inheritance model introduced by the Mixin class. In the interests of convenience
16 | and also so that data classes can be used *as is*, the Dataclass
17 | Wizard library provides the helper functions :func:`fromlist` and :func:`fromdict`
18 | for de-serialization, and :func:`asdict` for serialization. These functions also
19 | work recursively, so there is full support for nested dataclasses -- just as with
20 | the class inheritance approach.
21 |
22 | Here is an example to demonstrate the usage of these helper functions:
23 |
24 | .. code:: python3
25 |
26 | from dataclasses import dataclass
27 | from datetime import datetime
28 | from typing import Optional, Union
29 |
30 | from dataclass_wizard import fromdict, asdict, DumpMeta
31 |
32 |
33 | @dataclass
34 | class Container:
35 | id: int
36 | created_at: datetime
37 | my_elements: list['MyElement']
38 |
39 |
40 | @dataclass
41 | class MyElement:
42 | order_index: Optional[int]
43 | status_code: Union[int, str]
44 |
45 |
46 | source_dict = {'id': '123',
47 | 'createdAt': '2021-01-01 05:00:00Z',
48 | 'myElements': [
49 | {'orderIndex': 111, 'statusCode': '200'},
50 | {'order_index': '222', 'status_code': 404}
51 | ]}
52 |
53 | # De-serialize the JSON dictionary object into a `Container` instance.
54 | c = fromdict(Container, source_dict)
55 |
56 | print(repr(c))
57 | # prints:
58 | # Container(id=123, created_at=datetime.datetime(2021, 1, 1, 5, 0), my_elements=[MyElement(order_index=111, status_code='200'), MyElement(order_index=222, status_code=404)])
59 |
60 | # (Optional) Set up dump config for the inner class, as unfortunately there's
61 | # no option currently to have the meta config apply in a recursive fashion.
62 | _ = DumpMeta(MyElement, key_transform='SNAKE')
63 |
64 | # Serialize the `Container` instance to a Python dict object with a custom
65 | # dump config, for example one which converts field names to snake case.
66 | json_dict = asdict(c, DumpMeta(Container,
67 | key_transform='SNAKE',
68 | marshal_date_time_as='TIMESTAMP'))
69 |
70 | expected_dict = {'id': 123,
71 | 'created_at': 1609477200,
72 | 'my_elements': [
73 | {'order_index': 111, 'status_code': '200'},
74 | {'order_index': 222, 'status_code': 404}
75 | ]}
76 |
77 | # Assert that we get the expected dictionary object.
78 | assert json_dict == expected_dict
79 |
80 |
81 | .. _`pydantic`: https://pydantic-docs.helpmanual.io/
82 |
--------------------------------------------------------------------------------
/recipe/meta.yaml:
--------------------------------------------------------------------------------
1 | # Recipe used to publish this package to Anaconda and Conda Forge
2 | #
3 | # Note:
4 | # To publish, replace `source -> sha256` below, and run `make release-conda`
5 | #
6 | # Credits:
7 | # - https://github.com/conda-forge/staged-recipes
8 | # - https://docs.conda.io/projects/conda-build/en/latest/resources/define-metadata.html
9 |
10 | {% set data = load_setup_py_data(setup_file='../setup.py', from_recipe_dir=True) %}
11 | {% set name = data['name'] %}
12 | {% set version = data['version'] %}
13 | {% set author = "rnag" %}
14 | {% set repo_url = data['url'] %}
15 | {% set description = data['description'] %}
16 |
17 | package:
18 | name: {{ name|lower }}
19 | version: {{ version }}
20 |
21 | source:
22 | url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz
23 | sha256: 1a870882a8ff19e7ab9ede7b672b2f7c1ce8d69bbd2fc6d9629da749227268fd
24 | # sha256 is the preferred checksum -- you can get it for a file with:
25 | # `openssl sha256 `.
26 | # You may need the openssl package, available on conda-forge:
27 | # `conda install openssl -c conda-forge`
28 |
29 | build:
30 | number: 0
31 | entry_points:
32 | - wiz={{ name|replace('-', '_') }}.wizard_cli.cli:main
33 | script: {{ PYTHON }} -m pip install . -vv
34 | noarch: python
35 | # Add the line "skip: True # [py<35]" (for example) to limit to Python 3.5 and newer, or "skip: True # [not win]" to limit to Windows.
36 | skip: True # [py<39]
37 |
38 | requirements:
39 | host:
40 | - python
41 | - pip
42 | - setuptools
43 | run:
44 | - python
45 | - typing-extensions >=4.9.0 # [py<=312]
46 |
47 | test:
48 | imports:
49 | - {{ name|replace('-', '_') }}
50 | requires:
51 | - pip
52 | # - pytest
53 | commands:
54 | - pip check
55 | - wiz --help
56 | # - pytest -v
57 |
58 | about:
59 | home: {{ repo_url }}
60 | # See https://spdx.org/licenses/
61 | license: Apache-2.0
62 | # The license_family, i.e. "BSD" if license is "BSD-3-Clause". (optional)
63 | license_family: Apache
64 | # It is strongly encouraged to include a license file in the package,
65 | # (even if the license doesn't require it) using the license_file entry.
66 | # See https://docs.conda.io/projects/conda-build/en/latest/resources/define-metadata.html#license-file
67 | license_file: LICENSE
68 | summary: Lightning-fast JSON wizardry for Python dataclasses — effortless serialization right out of the box!
69 | # The remaining entries in this section are optional, but recommended.
70 | description: |
71 | The dataclass-wizard library provides a set of simple, yet
72 | elegant *wizarding* tools for interacting with the Python
73 | `dataclasses` module in Python 3.9+.
74 |
75 | The primary use is as a fast serialization framework that enables
76 | dataclass instances to be converted to/from JSON; this works well
77 | in particular with a *nested dataclass* model.
78 |
79 | The dataclass-wizard is pure Python code that relies entirely on
80 | stdlib, with the only added dependency being
81 | `typing-extensions`
82 | for Python 3.12 and below.
83 | doc_url: https://{{ name }}.readthedocs.io/
84 | dev_url: {{ repo_url }}
85 |
86 | extra:
87 | recipe-maintainers:
88 | # GitHub IDs for maintainers of the recipe.
89 | # Always check with the people listed below if they are OK becoming maintainers of the recipe. (There will be spam!)
90 | - {{ author }}
91 |
--------------------------------------------------------------------------------
/dataclass_wizard/utils/object_path.pyi:
--------------------------------------------------------------------------------
1 | from dataclasses import MISSING
2 | from typing import Any, Sequence, TypeAlias, Union
3 |
4 | PathPart: TypeAlias = Union[str, int, float, bool]
5 | PathType: TypeAlias = Sequence[PathPart]
6 |
7 |
8 | def safe_get(data: dict | list,
9 | path: PathType,
10 | default=MISSING,
11 | raise_: bool = True) -> Any:
12 | """
13 | Retrieve a value from a nested structure safely.
14 |
15 | Traverses a nested structure (e.g., dictionaries or lists) following a sequence of keys or indices specified in `path`.
16 | Handles missing keys, out-of-bounds indices, or invalid types gracefully.
17 |
18 | Args:
19 | data (Any): The nested structure to traverse.
20 | path (Iterable): A sequence of keys or indices to follow.
21 | default (Any): The value to return if the path cannot be fully traversed.
22 | If not provided and an error occurs, the exception is re-raised.
23 | raise_ (bool): True to raise an error on invalid path (default True).
24 |
25 | Returns:
26 | Any: The value at the specified path, or `default` if traversal fails.
27 |
28 | Raises:
29 | KeyError, IndexError, AttributeError, TypeError: If `default` is not provided
30 | and an error occurs during traversal.
31 | """
32 | ...
33 |
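# Illustrative only (this is a stub file, so no runtime code is added): based
# on the documented behavior above, a call might look like
#
#   cfg = {'servers': [{'host': 'localhost', 'port': 8080}]}
#   safe_get(cfg, ['servers', 0, 'host'])                # -> 'localhost'
#   safe_get(cfg, ['servers', 1, 'host'], default=None)  # -> None (path missing)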
34 |
35 | def v1_safe_get(data: dict | list,
36 | path: PathType,
37 | raise_: bool) -> Any:
38 | """
39 | Retrieve a value from a nested structure safely.
40 |
41 | Traverses a nested structure (e.g., dictionaries or lists) following a sequence of keys or indices specified in `path`.
42 | Handles missing keys, out-of-bounds indices, or invalid types gracefully.
43 |
44 | Args:
45 | data (Any): The nested structure to traverse.
46 | path (Iterable): A sequence of keys or indices to follow.
47 | raise_ (bool): True to raise an error on invalid path.
48 |
49 | Returns:
50 | Any: The value at the specified path, or `MISSING` if traversal fails.
51 |
52 | Raises:
53 |         KeyError, IndexError, AttributeError, TypeError: If `raise_` is True
54 |         and an error occurs during traversal.
55 | """
56 | ...
57 |
58 |
59 | def _format_err(e: Exception,
60 | current_data: Any,
61 | path: PathType,
62 | current_path: PathPart):
63 | """Format and return a `ParseError`."""
64 | ...
65 |
66 |
67 | def split_object_path(_input: str) -> PathType:
68 | """
69 | Parse a custom object path string into a list of components.
70 |
71 | This function interprets a custom object path syntax and breaks it into individual path components,
72 | including dictionary keys, list indices, attributes, and nested elements.
73 | It handles escaped characters and supports mixed types (e.g., strings, integers, floats, booleans).
74 |
75 | Args:
76 | _input (str): The object path string to parse.
77 |
78 | Returns:
79 | PathType: A list of components representing the parsed path. Components can be strings,
80 | integers, floats, booleans, or other valid key/index types.
81 |
82 | Example:
83 | >>> split_object_path(r'''a[b][c]["d\\\"o\\\""][e].f[go]['1'].then."y\\e\\\"s"[1]["we can!"].five.2.3.[ok][4.56].[-7.89].'let\\'sd\\othisy\\'all!'.yeah.123.False['True'].thanks!''')
84 | ['a', 'b', 'c', 'd"o"', 'e', 'f', 'go', '1', 'then', 'y\\e"s', 1, 'we can!', 'five', 2, 3, 'ok', 4.56, -7.89,
85 | "let'sd\\othisy'all!", 'yeah', 123, False, 'True', 'thanks!']
86 | """
87 |
--------------------------------------------------------------------------------
/tests/unit/environ/test_loaders.py:
--------------------------------------------------------------------------------
1 | import os
2 | from collections import namedtuple
3 | from dataclasses import dataclass
4 | from datetime import datetime, date, timezone
5 | from typing import Tuple, NamedTuple, List
6 |
7 | import pytest
8 |
9 | from dataclass_wizard import EnvWizard
10 | from dataclass_wizard.environ.loaders import EnvLoader
11 |
12 |
13 | def test_load_to_bytes():
14 | assert EnvLoader.load_to_bytes('testing 123', bytes) == b'testing 123'
15 |
16 |
17 | @pytest.mark.parametrize(
18 | 'input,expected',
19 | [
20 | ('testing 123', bytearray(b'testing 123')),
21 | (b'test', bytearray(b'test')),
22 | ([1, 2, 3], bytearray([1, 2, 3]))
23 | ]
24 | )
25 | def test_load_to_bytearray(input, expected):
26 | assert EnvLoader.load_to_byte_array(input, bytearray) == expected
27 |
28 |
29 | def test_load_to_tuple_and_named_tuple():
30 | os.environ['MY_TUP'] = '1,2,3'
31 | os.environ['MY_NT'] = '[1.23, "string"]'
32 | os.environ['my_untyped_nt'] = 'hello , world, 123'
33 |
34 | class MyNT(NamedTuple):
35 | my_float: float
36 | my_str: str
37 |
38 | untyped_tup = namedtuple('untyped_tup', ('a', 'b', 'c'))
39 |
40 | class MyClass(EnvWizard, reload_env=True):
41 | my_tup: Tuple[int, ...]
42 | my_nt: MyNT
43 | my_untyped_nt: untyped_tup
44 |
45 | c = MyClass()
46 |
47 | assert c.dict() == {'my_nt': MyNT(my_float=1.23, my_str='string'),
48 | 'my_tup': (1, 2, 3),
49 | 'my_untyped_nt': untyped_tup(a='hello', b='world', c='123')}
50 |
51 | assert c.to_dict() == {'my_nt': MyNT(my_float=1.23, my_str='string'),
52 | 'my_tup': (1, 2, 3),
53 | 'my_untyped_nt': untyped_tup(a='hello', b='world', c='123')}
54 |
55 |
56 | def test_load_to_dataclass():
57 | """When an `EnvWizard` subclass has a nested dataclass schema."""
58 |
59 | os.environ['inner_cls_1'] = 'my_bool=false, my_string=test'
60 | os.environ['inner_cls_2'] = '{"answerToLife": "42", "MyList": "testing, 123 , hello!"}'
61 |
62 | @dataclass
63 | class Inner1:
64 | my_bool: bool
65 | my_string: str
66 |
67 | @dataclass
68 | class Inner2:
69 | answer_to_life: int
70 | my_list: List[str]
71 |
72 | class MyClass(EnvWizard, reload_env=True):
73 |
74 | inner_cls_1: Inner1
75 | inner_cls_2: Inner2
76 |
77 | c = MyClass()
78 | # print(c)
79 |
80 | assert c.dict() == {
81 | 'inner_cls_1': Inner1(my_bool=False,
82 | my_string='test'),
83 | 'inner_cls_2': Inner2(answer_to_life=42,
84 | my_list=['testing', '123', 'hello!']),
85 | }
86 |
87 | assert c.to_dict() == {
88 | 'inner_cls_1': {'my_bool': False,
89 | 'my_string': 'test'},
90 | 'inner_cls_2': {'answer_to_life': 42,
91 | 'my_list': ['testing', '123', 'hello!']}
92 | }
93 |
94 |
95 | @pytest.mark.parametrize(
96 | 'input,expected',
97 | [
98 | ('2021-11-28T17:35:55', datetime(2021, 11, 28, 17, 35, 55)),
99 | (1577952245, datetime(2020, 1, 2, 8, 4, 5, tzinfo=timezone.utc)),
100 | (datetime.min, datetime.min)
101 | ]
102 | )
103 | def test_load_to_datetime(input, expected):
104 | assert EnvLoader.load_to_datetime(input, datetime) == expected
105 |
106 |
107 | @pytest.mark.parametrize(
108 | 'input,expected',
109 | [
110 | ('2021-11-28', date(2021, 11, 28)),
111 | (1577952245, date(2020, 1, 2)),
112 | (date.min, date.min)
113 | ]
114 | )
115 | def test_load_to_date(input, expected):
116 | assert EnvLoader.load_to_date(input, date) == expected
117 |
--------------------------------------------------------------------------------
/benchmarks/catch_all.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from dataclasses import dataclass
3 | from typing import Any
4 |
5 | import pytest
6 |
7 | from dataclasses_json import (dataclass_json, Undefined, CatchAll as CatchAllDJ)
8 | from dataclass_wizard import (JSONWizard, CatchAll as CatchAllWizard)
9 |
10 |
11 | log = logging.getLogger(__name__)
12 |
13 |
14 | @dataclass()
15 | class DontCareAPIDump:
16 | endpoint: str
17 | data: dict[str, Any]
18 |
19 |
20 | @dataclass_json(undefined=Undefined.INCLUDE)
21 | @dataclass()
22 | class DontCareAPIDumpDJ(DontCareAPIDump):
23 | unknown_things: CatchAllDJ
24 |
25 |
26 | @dataclass()
27 | class DontCareAPIDumpWizard(DontCareAPIDump, JSONWizard):
28 |
29 | class _(JSONWizard.Meta):
30 | v1 = True
31 |
32 | unknown_things: CatchAllWizard
33 |
34 |
35 | # Fixtures for test data
36 | @pytest.fixture(scope='session')
37 | def data():
38 | return {"endpoint": "some_api_endpoint",
39 | "data": {"foo": 1, "bar": "2"},
40 | "undefined_field_name": [1, 2, 3]}
41 |
42 |
43 | @pytest.fixture(scope='session')
44 | def data_no_extras():
45 | return {"endpoint": "some_api_endpoint",
46 | "data": {"foo": 1, "bar": "2"}}
47 |
48 |
49 | # Benchmark for deserialization (from_dict)
50 | @pytest.mark.benchmark(group="deserialization")
51 | def test_deserialize_wizard(benchmark, data):
52 | benchmark(lambda: DontCareAPIDumpWizard.from_dict(data))
53 |
54 |
55 | @pytest.mark.benchmark(group="deserialization")
56 | def test_deserialize_json(benchmark, data):
57 | benchmark(lambda: DontCareAPIDumpDJ.from_dict(data))
58 |
59 |
60 | # Benchmark for deserialization with no extra data
61 | @pytest.mark.benchmark(group="deserialization_no_extra_data")
62 | def test_deserialize_wizard_no_extras(benchmark, data_no_extras):
63 | benchmark(lambda: DontCareAPIDumpWizard.from_dict(data_no_extras))
64 |
65 |
66 | @pytest.mark.benchmark(group="deserialization_no_extra_data")
67 | def test_deserialize_json_no_extras(benchmark, data_no_extras):
68 | benchmark(lambda: DontCareAPIDumpDJ.from_dict(data_no_extras))
69 |
70 |
71 | # Benchmark for serialization (to_dict)
72 | @pytest.mark.benchmark(group="serialization")
73 | def test_serialize_wizard(benchmark, data):
74 | dump1 = DontCareAPIDumpWizard.from_dict(data)
75 | benchmark(lambda: dump1.to_dict())
76 |
77 |
78 | @pytest.mark.benchmark(group="serialization")
79 | def test_serialize_json(benchmark, data):
80 | dump2 = DontCareAPIDumpDJ.from_dict(data)
81 | benchmark(lambda: dump2.to_dict())
82 |
83 |
84 | def test_validate(data, data_no_extras):
85 | dump1 = DontCareAPIDumpDJ.from_dict(data_no_extras) # DontCareAPIDump(endpoint='some_api_endpoint', data={'foo': 1, 'bar': '2'})
86 | dump2 = DontCareAPIDumpWizard.from_dict(data_no_extras) # DontCareAPIDump(endpoint='some_api_endpoint', data={'foo': 1, 'bar': '2'})
87 |
88 | assert dump1.endpoint == dump2.endpoint
89 | assert dump1.data == dump2.data
90 | assert dump1.unknown_things == dump2.unknown_things == {}
91 |
92 | expected = {'endpoint': 'some_api_endpoint', 'data': {'foo': 1, 'bar': '2'}}
93 |
94 | assert dump1.to_dict() == dump2.to_dict() == expected
95 |
96 | dump1 = DontCareAPIDumpDJ.from_dict(data) # DontCareAPIDump(endpoint='some_api_endpoint', data={'foo': 1, 'bar': '2'})
97 | dump2 = DontCareAPIDumpWizard.from_dict(data) # DontCareAPIDump(endpoint='some_api_endpoint', data={'foo': 1, 'bar': '2'})
98 |
99 | assert dump1.endpoint == dump2.endpoint
100 | assert dump1.data == dump2.data
101 | assert dump1.unknown_things == dump2.unknown_things
102 |
103 | expected = {'endpoint': 'some_api_endpoint', 'data': {'foo': 1, 'bar': '2'}, 'undefined_field_name': [1, 2, 3]}
104 |
105 | assert dump1.to_dict() == dump2.to_dict() == expected
106 |
--------------------------------------------------------------------------------
/docs/common_use_cases/cyclic_or_recursive_dataclasses.rst:
--------------------------------------------------------------------------------
1 | Cyclic or "Recursive" Dataclasses
2 | =================================
3 |
4 | .. note::
5 | **Important:** The current functionality for cyclic or "recursive" dataclasses is being re-imagined.
6 |     Please refer to the new docs for the **V1 Opt-in** features, which introduce enhanced support for these use
7 |     cases. For more details, see the `Field Guide to V1 Opt‐in`_ and the `Recursive Types and Dataclasses with Cyclic References in V1`_ documentation; a minimal sketch of the V1 approach follows this note.
8 |
9 | This change is part of the ongoing improvements in version ``v0.34.0+``, and the old functionality will no longer be maintained in future releases.
10 |
11 | .. _Field Guide to V1 Opt‐in: https://github.com/rnag/dataclass-wizard/wiki/Field-Guide-to-V1-Opt%E2%80%90in
12 | .. _Recursive Types and Dataclasses with Cyclic References in V1: https://github.com/rnag/dataclass-wizard/wiki/V1:-Recursive-Types-and-Dataclasses-with-Cyclic-References
13 |
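For orientation, here is a minimal, illustrative sketch of what the V1 opt-in approach can
look like (the exact V1 behavior is documented in the wiki pages linked above, which remain
the authoritative reference):

.. code:: python3

    from __future__ import annotations  # can be removed in Python 3.10+

    from dataclasses import dataclass

    from dataclass_wizard import JSONWizard


    @dataclass
    class A(JSONWizard):
        class _(JSONWizard.Meta):
            # opt in to the re-imagined V1 behavior
            v1 = True

        a: A | None = None


    # Illustrative only: under the V1 opt-in, self-referential models are
    # expected to work without the `recursive_classes` flag shown later on
    # this page.
    a = A.from_dict({'a': {'a': {'a': None}}})
    assert a == A(a=A(a=A(a=None)))
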
14 | Prior to version ``v0.27.0``, dataclasses with cyclic references
15 | or self-referential structures were not supported. This
16 | limitation is shown in the following toy example:
17 |
18 | .. code:: python3
19 |
20 | from dataclasses import dataclass
21 |
22 | from dataclass_wizard import JSONWizard
23 |
24 |
25 | @dataclass
26 | class A(JSONWizard):
27 | a: 'A | None' = None
28 |
29 |
30 | a = A.from_dict({'a': {'a': {'a': None}}})
31 | assert a == A(a=A(a=A(a=None)))
32 |
33 | This has been a `longstanding issue`_.
34 |
35 | New in ``v0.27.0``: The Dataclass Wizard now extends its support
36 | to cyclic and self-referential dataclass models.
37 |
38 | The example below demonstrates recursive dataclasses with cyclic
39 | dependencies, following the pattern ``A -> B -> A -> B``.
40 |
41 | With Class Inheritance
42 | **********************
43 |
44 | Here’s a basic example demonstrating the use of recursive dataclasses
45 | with cyclic dependencies, using a class inheritance model and
46 | the :class:`JSONWizard` mixin:
47 |
48 | .. code:: python3
49 |
50 | from __future__ import annotations # This can be removed in Python 3.10+
51 |
52 | from dataclasses import dataclass
53 |
54 | from dataclass_wizard import JSONWizard
55 |
56 |
57 | @dataclass
58 | class A(JSONWizard):
59 | class _(JSONWizard.Meta):
60 | # enable support for self-referential / recursive dataclasses
61 | recursive_classes = True
62 |
63 | b: 'B | None' = None
64 |
65 |
66 | @dataclass
67 | class B:
68 | a: A | None = None
69 |
70 |
71 | # confirm that `from_dict` with a recursive, self-referential
72 | # input `dict` works as expected.
73 | a = A.from_dict({'b': {'a': {'b': {'a': None}}}})
74 | assert a == A(b=B(a=A(b=B())))
75 |
76 | Without Class Inheritance
77 | *************************
78 |
79 | Here is the same example as above, but relying solely on ``dataclasses``, without
80 | using any special class inheritance model:
81 |
82 |
83 | .. code:: python3
84 |
85 | from __future__ import annotations # This can be removed in Python 3.10+
86 |
87 | from dataclasses import dataclass
88 |
89 | from dataclass_wizard import fromdict, LoadMeta
90 |
91 |
92 | @dataclass
93 | class A:
94 | b: 'B | None' = None
95 |
96 |
97 | @dataclass
98 | class B:
99 | a: A | None = None
100 |
101 |
102 | # enable support for self-referential / recursive dataclasses
103 | LoadMeta(recursive_classes=True).bind_to(A)
104 |
105 | # confirm that `from_dict` with a recursive, self-referential
106 | # input `dict` works as expected.
107 | a = fromdict(A, {'b': {'a': {'b': {'a': None}}}})
108 | assert a == A(b=B(a=A(b=B())))
109 |
110 | .. _longstanding issue: https://github.com/rnag/dataclass-wizard/issues/62
111 |
--------------------------------------------------------------------------------
/tests/unit/utils/test_string_conv.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from dataclass_wizard.utils.string_conv import *
4 |
5 |
6 | @pytest.mark.parametrize(
7 | 'string,expected',
8 | [
9 | ('device_type', 'deviceType'),
10 | ('io_error', 'ioError'),
11 | ('isACamelCasedWORD', 'isACamelCasedWORD'),
12 | ('ATitledWordToTESTWith', 'aTitledWordToTESTWith'),
13 | ('not-a-tester', 'notATester'),
14 | ('device_type', 'deviceType'),
15 | ('helloworld', 'helloworld'),
16 | ('A', 'a'),
17 | ('TESTing_if_thisWorks', 'tESTingIfThisWorks'),
18 | ('a_B_Cde_fG_hi', 'aBCdeFGHi'),
19 | ('ALL_CAPS', 'aLLCAPS'),
20 | ('WoRd', 'woRd'),
21 | ('HIThereHOWIsItGoinG', 'hIThereHOWIsItGoinG'),
22 | ('How_-Are-_YoUDoing__TeST', 'howAreYoUDoingTeST'),
23 | ('thisIsWithANumber42ToTEST', 'thisIsWithANumber42ToTEST'),
24 | ('Number 42 With spaces', 'number42WithSpaces')
25 | ]
26 | )
27 | def test_to_camel_case(string, expected):
28 | actual = to_camel_case(string)
29 | assert actual == expected
30 |
31 |
32 | @pytest.mark.parametrize(
33 | 'string,expected',
34 | [
35 | ('device_type', 'DeviceType'),
36 | ('io_error', 'IoError'),
37 | ('isACamelCasedWORD', 'IsACamelCasedWORD'),
38 | ('ATitledWordToTESTWith', 'ATitledWordToTESTWith'),
39 | ('not-a-tester', 'NotATester'),
40 | ('device_type', 'DeviceType'),
41 | ('helloworld', 'Helloworld'),
42 | ('A', 'A'),
43 | ('TESTing_if_thisWorks', 'TESTingIfThisWorks'),
44 | ('a_B_Cde_fG_hi', 'ABCdeFGHi'),
45 | ('ALL_CAPS', 'ALLCAPS'),
46 | ('WoRd', 'WoRd'),
47 | ('HIThereHOWIsItGoinG', 'HIThereHOWIsItGoinG'),
48 | ('How_-Are-_YoUDoing__TeST', 'HowAreYoUDoingTeST'),
49 | ('thisIsWithANumber42ToTEST', 'ThisIsWithANumber42ToTEST'),
50 | ('Number 42 With spaces', 'Number42WithSpaces')
51 | ]
52 | )
53 | def test_to_pascal_case(string, expected):
54 | actual = to_pascal_case(string)
55 | assert actual == expected
56 |
57 |
58 | @pytest.mark.parametrize(
59 | 'string,expected',
60 | [
61 | ('device_type', 'device-type'),
62 | ('IO_Error', 'io-error'),
63 | ('isACamelCasedWORD', 'is-a-camel-cased-word'),
64 | ('ATitledWordToTESTWith', 'a-titled-word-to-test-with'),
65 | ('not-a-tester', 'not-a-tester'),
66 | ('helloworld', 'helloworld'),
67 | ('A', 'a'),
68 | ('TESTing_if_thisWorks', 'tes-ting-if-this-works'),
69 | ('a_B_Cde_fG_hi', 'a-b-cde-f-g-hi'),
70 | ('ALL_CAPS', 'all-caps'),
71 | ('WoRd', 'wo-rd'),
72 | ('HIThereHOWIsItGoinG', 'hi-there-how-is-it-goin-g'),
73 | ('How_-Are-_YoUDoing__TeST', 'how-are-yo-u-doing-te-st'),
74 | ('thisIsWithANumber42ToTEST', 'this-is-with-a-number42-to-test'),
75 | ('Number 42 With spaces', 'number-42-with-spaces')
76 | ]
77 | )
78 | def test_to_lisp_case(string, expected):
79 | actual = to_lisp_case(string)
80 | assert actual == expected
81 |
82 |
83 | @pytest.mark.parametrize(
84 | 'string,expected',
85 | [
86 | ('device_type', 'device_type'),
87 | ('IO_Error', 'io_error'),
88 | ('isACamelCasedWORD', 'is_a_camel_cased_word'),
89 | ('ATitledWordToTESTWith', 'a_titled_word_to_test_with'),
90 | ('not-a-tester', 'not_a_tester'),
91 | ('helloworld', 'helloworld'),
92 | ('A', 'a'),
93 | ('TESTing_if_thisWorks', 'tes_ting_if_this_works'),
94 | ('a_B_Cde_fG_hi', 'a_b_cde_f_g_hi'),
95 | ('ALL_CAPS', 'all_caps'),
96 | ('WoRd', 'wo_rd'),
97 | ('HIThereHOWIsItGoinG', 'hi_there_how_is_it_goin_g'),
98 | ('How_-Are-_YoUDoing__TeST', 'how_are_yo_u_doing_te_st'),
99 | ('thisIsWithANumber42ToTEST', 'this_is_with_a_number42_to_test'),
100 | ('Number 42 With spaces', 'number_42_with_spaces')
101 | ]
102 | )
103 | def test_to_snake_case(string, expected):
104 | actual = to_snake_case(string)
105 | assert actual == expected
106 |
--------------------------------------------------------------------------------
/docs/_static/dark_mode.css:
--------------------------------------------------------------------------------
1 | /* General dark mode body */
2 | body.dark-mode {
3 | background-color: #1e1e1e;
4 | color: #cfcfcf;
5 | }
6 |
7 | /* Main page content */
8 | body.dark-mode .body {
9 | background-color: #1e1e1e;
10 | color: #cfcfcf;
11 | }
12 |
13 | /* Fix for the main content on index */
14 | body.dark-mode .content {
15 | background-color: #1e1e1e;
16 | color: #cfcfcf;
17 | }
18 |
19 | /* Sidebar elements */
20 | body.dark-mode .wy-nav-side,
21 | body.dark-mode .wy-side-nav-search {
22 | background-color: #22272e;
23 | color: #cfcfcf;
24 | }
25 |
26 | /* Headings */
27 | body.dark-mode h1,
28 | body.dark-mode h2,
29 | body.dark-mode h3,
30 | body.dark-mode h4 {
31 | color: #ffffff;
32 | }
33 |
34 | /* Links */
35 | body.dark-mode a {
36 | color: #79b8ff;
37 | }
38 |
39 | /* Code blocks */
40 | body.dark-mode pre,
41 | body.dark-mode code {
42 | background-color: #2d333b;
43 | color: #f0f0f0;
44 | }
45 |
46 | /* General REPL Python output */
47 | body.dark-mode pre.highlight,
48 | body.dark-mode code.highlight {
49 | background-color: #2d333b;
50 | color: #f0f0f0; /* Ensures all text in REPL blocks is visible */
51 | }
52 |
53 | /* Handle the '>>>', '...' prompts */
54 | body.dark-mode .highlight .gp {
55 | color: #79b8ff; /* Color for REPL prompts like '>>>' */
56 | }
57 |
58 | /* Handle REPL output */
59 | body.dark-mode .highlight .go {
60 | color: #d5a476; /* Distinct color for REPL outputs */
61 | }
62 |
63 | /* Decorators (e.g., @dataclass) */
64 | body.dark-mode .highlight .nd {
65 | color: rgba(192, 144, 2, 0.87); /* Dark, burnished gold for decorators */
66 | }
67 |
68 | /* Operators (e.g., ==, +, -, etc.) */
69 | body.dark-mode .highlight .o {
70 | color: #d5a476; /* Match your REPL output lighter gold */
71 | }
72 |
73 | /* Punctuation (e.g., . , ( )) */
74 | body.dark-mode .highlight .p {
75 | color: #cfcfcf; /* Neutral light gray for punctuation */
76 | }
77 |
78 | /* Built-in types and constants (e.g., str, int, True, False) */
79 | body.dark-mode .highlight .nb {
80 | color: #4ec9b0; /* Teal for built-in types/constants */
81 | }
82 |
83 | /* Function and variable names */
84 | body.dark-mode .highlight .nf,
85 | body.dark-mode .highlight .n {
86 | color: #9cdcfe; /* Light blue for function/variable names */
87 | }
88 |
89 | /* General admonition block */
90 | body.dark-mode .admonition {
91 | background-color: #2e3b4e; /* Neutral dark background */
92 | color: #b0b0b0; /* Softer silver text */
93 | border-left: 4px solid #79b8ff; /* Default blue accent */
94 | padding: 10px;
95 | border-radius: 6px; /* Rounded corners */
96 | }
97 |
98 | /* Title of admonition blocks */
99 | body.dark-mode .admonition .admonition-title {
100 | color: #79b8ff; /* Bright title text for clarity */
101 | font-weight: bold;
102 | }
103 |
104 | /* Specific styles for .. warning:: */
105 | body.dark-mode .admonition.warning {
106 | background-color: #4a3224; /* Warm dark terracotta */
107 | border-left-color: #d8845e; /* Subdued orange for less vibrancy */
108 | color: #d3b8a6; /* Soft beige text for a smoother contrast */
109 | }
110 |
111 | /* Specific styles for .. note:: */
112 | body.dark-mode .admonition.note {
113 | background-color: #4b4430; /* Subdued dark olive-brown background */
114 | border-left-color: #bfa45e; /* Muted goldenrod border */
115 | color: #d4c8a8; /* Softer light tan text to reduce glare */
116 | }
117 |
118 | /* Specific styles for .. tip:: */
119 | body.dark-mode .admonition.tip {
120 | background-color: #2b4e4e; /* Teal background */
121 | border-left-color: #56b6c2; /* Cyan border for tips */
122 | color: #d8e0e0; /* Softer light teal text */
123 | }
124 |
125 | /* Specific styles for .. important:: */
126 | body.dark-mode .admonition.important {
127 | background-color: #4e3b2b; /* Brownish background */
128 | border-left-color: #d19a66; /* Amber border for important */
129 | color: #e0d6d1; /* Softer light beige text */
130 | }
131 |
132 | /* Highlighting inline code within admonitions */
133 | body.dark-mode .admonition code {
134 | background-color: #2d333b;
135 | color: #f0f0f0;
136 | padding: 2px 4px;
137 | border-radius: 4px;
138 | }
139 |
--------------------------------------------------------------------------------
/docs/dataclass_wizard.rst:
--------------------------------------------------------------------------------
1 | dataclass\_wizard package
2 | =========================
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 | :maxdepth: 4
9 |
10 | dataclass_wizard.environ
11 | dataclass_wizard.utils
12 | dataclass_wizard.v1
13 | dataclass_wizard.wizard_cli
14 |
15 | Submodules
16 | ----------
17 |
18 | dataclass\_wizard.abstractions module
19 | -------------------------------------
20 |
21 | .. automodule:: dataclass_wizard.abstractions
22 | :members:
23 | :undoc-members:
24 | :show-inheritance:
25 |
26 | dataclass\_wizard.bases module
27 | ------------------------------
28 |
29 | .. automodule:: dataclass_wizard.bases
30 | :members:
31 | :undoc-members:
32 | :show-inheritance:
33 |
34 | dataclass\_wizard.bases\_meta module
35 | ------------------------------------
36 |
37 | .. automodule:: dataclass_wizard.bases_meta
38 | :members:
39 | :undoc-members:
40 | :show-inheritance:
41 |
42 | dataclass\_wizard.class\_helper module
43 | --------------------------------------
44 |
45 | .. automodule:: dataclass_wizard.class_helper
46 | :members:
47 | :undoc-members:
48 | :show-inheritance:
49 |
50 | dataclass\_wizard.constants module
51 | ----------------------------------
52 |
53 | .. automodule:: dataclass_wizard.constants
54 | :members:
55 | :undoc-members:
56 | :show-inheritance:
57 |
58 | dataclass\_wizard.decorators module
59 | -----------------------------------
60 |
61 | .. automodule:: dataclass_wizard.decorators
62 | :members:
63 | :undoc-members:
64 | :show-inheritance:
65 |
66 | dataclass\_wizard.dumpers module
67 | --------------------------------
68 |
69 | .. automodule:: dataclass_wizard.dumpers
70 | :members:
71 | :undoc-members:
72 | :show-inheritance:
73 |
74 | dataclass\_wizard.enums module
75 | ------------------------------
76 |
77 | .. automodule:: dataclass_wizard.enums
78 | :members:
79 | :undoc-members:
80 | :show-inheritance:
81 |
82 | dataclass\_wizard.errors module
83 | -------------------------------
84 |
85 | .. automodule:: dataclass_wizard.errors
86 | :members:
87 | :undoc-members:
88 | :show-inheritance:
89 |
90 | dataclass\_wizard.lazy\_imports module
91 | --------------------------------------
92 |
93 | .. automodule:: dataclass_wizard.lazy_imports
94 | :members:
95 | :undoc-members:
96 | :show-inheritance:
97 |
98 | dataclass\_wizard.loader\_selection module
99 | ------------------------------------------
100 |
101 | .. automodule:: dataclass_wizard.loader_selection
102 | :members:
103 | :undoc-members:
104 | :show-inheritance:
105 |
106 | dataclass\_wizard.loaders module
107 | --------------------------------
108 |
109 | .. automodule:: dataclass_wizard.loaders
110 | :members:
111 | :undoc-members:
112 | :show-inheritance:
113 |
114 | dataclass\_wizard.log module
115 | ----------------------------
116 |
117 | .. automodule:: dataclass_wizard.log
118 | :members:
119 | :undoc-members:
120 | :show-inheritance:
121 |
122 | dataclass\_wizard.models module
123 | -------------------------------
124 |
125 | .. automodule:: dataclass_wizard.models
126 | :members:
127 | :undoc-members:
128 | :show-inheritance:
129 |
130 | dataclass\_wizard.parsers module
131 | --------------------------------
132 |
133 | .. automodule:: dataclass_wizard.parsers
134 | :members:
135 | :undoc-members:
136 | :show-inheritance:
137 |
138 | dataclass\_wizard.property\_wizard module
139 | -----------------------------------------
140 |
141 | .. automodule:: dataclass_wizard.property_wizard
142 | :members:
143 | :undoc-members:
144 | :show-inheritance:
145 |
146 | dataclass\_wizard.serial\_json module
147 | -------------------------------------
148 |
149 | .. automodule:: dataclass_wizard.serial_json
150 | :members:
151 | :undoc-members:
152 | :show-inheritance:
153 |
154 | dataclass\_wizard.type\_def module
155 | ----------------------------------
156 |
157 | .. automodule:: dataclass_wizard.type_def
158 | :members:
159 | :undoc-members:
160 | :show-inheritance:
161 |
162 | dataclass\_wizard.wizard\_mixins module
163 | ---------------------------------------
164 |
165 | .. automodule:: dataclass_wizard.wizard_mixins
166 | :members:
167 | :undoc-members:
168 | :show-inheritance:
169 |
170 | Module contents
171 | ---------------
172 |
173 | .. automodule:: dataclass_wizard
174 | :members:
175 | :undoc-members:
176 | :show-inheritance:
177 |
--------------------------------------------------------------------------------
/dataclass_wizard/wizard_mixins.pyi:
--------------------------------------------------------------------------------
1 | __all__ = ['JSONListWizard',
2 | 'JSONFileWizard',
3 | 'TOMLWizard',
4 | 'YAMLWizard']
5 |
6 | import json
7 | from os import PathLike
8 | from typing import AnyStr, TextIO, BinaryIO, Union, TypeAlias
9 |
10 | from .abstractions import W
11 | from .enums import LetterCase
12 | from .models import Container
13 | from .serial_json import JSONSerializable, SerializerHookMixin
14 | from .type_def import (T, ListOfJSONObject,
15 | Encoder, Decoder, FileDecoder, FileEncoder, ParseFloat)
16 |
17 |
18 | # A type that can be a string, bytes, or `os.PathLike` object
19 | # (i.e. anything accepted as a filesystem path)
20 | # https://stackoverflow.com/a/78070015/10237506
21 | FileType: TypeAlias = str | bytes | PathLike
22 |
23 |
24 | class JSONListWizard(JSONSerializable, str=False):
25 |
26 | @classmethod
27 | def from_json(cls: type[W], string: AnyStr, *,
28 | decoder: Decoder = json.loads,
29 | **decoder_kwargs) -> W | Container[W]:
30 |
31 | ...
32 |
33 | @classmethod
34 | def from_list(cls: type[W], o: ListOfJSONObject) -> Container[W]:
35 | ...
36 |
37 |
38 | class JSONFileWizard(SerializerHookMixin):
39 |
40 | @classmethod
41 | def from_json_file(cls: type[T], file: FileType, *,
42 | decoder: FileDecoder = json.load,
43 | **decoder_kwargs) -> T | list[T]:
44 | ...
45 |
46 | def to_json_file(self: T, file: FileType, mode: str = 'w',
47 | encoder: FileEncoder = json.dump,
48 | **encoder_kwargs) -> None:
49 | ...
50 |
51 |
52 | class TOMLWizard(SerializerHookMixin):
53 |
54 | def __init_subclass__(cls, key_transform=LetterCase.NONE):
55 | ...
56 |
57 | @classmethod
58 | def from_toml(cls: type[T],
59 | string_or_stream: AnyStr | BinaryIO, *,
60 | decoder: Decoder | None = None,
61 | header: str = 'items',
62 | parse_float: ParseFloat = float) -> T | list[T]:
63 | ...
64 |
65 | @classmethod
66 | def from_toml_file(cls: type[T], file: FileType, *,
67 | decoder: FileDecoder | None = None,
68 | header: str = 'items',
69 | parse_float: ParseFloat = float) -> T | list[T]:
70 | ...
71 |
72 | def to_toml(self: T,
73 | /,
74 | *encoder_args,
75 | encoder: Encoder | None = None,
76 | multiline_strings: bool = False,
77 | indent: int = 4) -> AnyStr:
78 | ...
79 |
80 | def to_toml_file(self: T, file: FileType, mode: str = 'wb',
81 | encoder: FileEncoder | None = None,
82 | multiline_strings: bool = False,
83 | indent: int = 4) -> None:
84 | ...
85 |
86 | @classmethod
87 | def list_to_toml(cls: type[T],
88 | instances: list[T],
89 | header: str = 'items',
90 | encoder: Encoder | None = None,
91 | **encoder_kwargs) -> AnyStr:
92 | ...
93 |
94 |
95 | class YAMLWizard(SerializerHookMixin):
96 |
97 | def __init_subclass__(cls, key_transform=LetterCase.LISP):
98 | ...
99 |
100 | @classmethod
101 | def from_yaml(cls: type[T],
102 | string_or_stream: AnyStr | TextIO | BinaryIO, *,
103 | decoder: Decoder | None = None,
104 | **decoder_kwargs) -> T | list[T]:
105 | ...
106 |
107 | @classmethod
108 | def from_yaml_file(cls: type[T], file: FileType, *,
109 | decoder: FileDecoder | None = None,
110 | **decoder_kwargs) -> T | list[T]:
111 | ...
112 |
113 | def to_yaml(self: T, *,
114 | encoder: Encoder | None = None,
115 | **encoder_kwargs) -> AnyStr:
116 | ...
117 |
118 | def to_yaml_file(self: T, file: FileType, mode: str = 'w',
119 | encoder: FileEncoder | None = None,
120 | **encoder_kwargs) -> None:
121 | ...
122 |
123 | @classmethod
124 | def list_to_yaml(cls: type[T],
125 | instances: list[T],
126 | encoder: Encoder | None = None,
127 | **encoder_kwargs) -> AnyStr:
128 | ...
129 |
--------------------------------------------------------------------------------
/dataclass_wizard/bases_meta.pyi:
--------------------------------------------------------------------------------
1 | """
2 | Ideally this should be in the `bases` module; however, we'll run into a circular
3 | import scenario if we move it there, since the `loaders` and `dumpers` modules
4 | both import directly from `bases`.
5 |
6 | """
7 | from dataclasses import MISSING
8 | from datetime import tzinfo
9 | from typing import Sequence, Callable, Any, Mapping, Literal
10 |
11 | from .bases import AbstractMeta, META, AbstractEnvMeta, V1TypeToHook
12 | from .constants import TAG
13 | from .enums import DateTimeTo, LetterCase, LetterCasePriority
14 | from .v1.enums import KeyAction, KeyCase, DateTimeTo as V1DateTimeTo
15 | from .models import Condition
16 | from .type_def import E, EnvFileType
17 |
18 | _ALLOWED_MODES = Literal['runtime', 'v1_codegen']
19 |
20 | # global flag to determine if debug mode was ever enabled
21 | _debug_was_enabled = False
22 |
23 | V1HookFn = Callable[..., Any]
24 |
25 | def register_type(cls, tp: type, *,
26 | load: 'V1HookFn | None' = None,
27 | dump: 'V1HookFn | None' = None,
28 | mode: str | None = None) -> None: ...
29 |
30 |
31 | def _enable_debug_mode_if_needed(cls_loader, possible_lvl: bool | int | str):
32 | ...
33 |
34 |
35 | def _as_enum_safe(cls: type, name: str, base_type: type[E]) -> E | None:
36 | ...
37 |
38 |
39 | class BaseJSONWizardMeta(AbstractMeta):
40 |
41 | __slots__ = ()
42 |
43 | @classmethod
44 | def _init_subclass(cls):
45 | ...
46 |
47 | @classmethod
48 | def bind_to(cls, dataclass: type, create=True, is_default=True,
49 | base_loader=None, base_dumper=None):
50 | ...
51 |
52 |
53 | class BaseEnvWizardMeta(AbstractEnvMeta):
54 |
55 | __slots__ = ()
56 |
57 | @classmethod
58 | def _init_subclass(cls):
59 | ...
60 |
61 | @classmethod
62 | def bind_to(cls, env_class: type, create=True, is_default=True):
63 | ...
64 |
65 |
66 | # noinspection PyPep8Naming
67 | def LoadMeta(*, debug_enabled: 'bool | int | str' = MISSING,
68 | recursive: bool = True,
69 | recursive_classes: bool = MISSING,
70 | raise_on_unknown_json_key: bool = MISSING,
71 | json_key_to_field: dict[str, str] = MISSING,
72 | key_transform: LetterCase | str = MISSING,
73 | tag: str = MISSING,
74 | tag_key: str = TAG,
75 | auto_assign_tags: bool = MISSING,
76 | v1: bool = MISSING,
77 | v1_debug: bool | int | str = False,
78 | v1_type_to_load_hook: V1TypeToHook = None,
79 | v1_type_to_dump_hook: V1TypeToHook = None,
80 | v1_case: KeyCase | str | None = MISSING,
81 | v1_field_to_alias: dict[str, str | Sequence[str]] = MISSING,
82 | v1_on_unknown_key: KeyAction | str | None = KeyAction.IGNORE,
83 | v1_unsafe_parse_dataclass_in_union: bool = MISSING) -> META:
84 | ...
85 |
86 |
87 | # noinspection PyPep8Naming
88 | def DumpMeta(*, debug_enabled: 'bool | int | str' = MISSING,
89 | recursive: bool = True,
90 | marshal_date_time_as: DateTimeTo | str = MISSING,
91 | key_transform: LetterCase | str = MISSING,
92 | tag: str = MISSING,
93 | skip_defaults: bool = MISSING,
94 | skip_if: Condition = MISSING,
95 | skip_defaults_if: Condition = MISSING,
96 | v1: bool = MISSING,
97 | v1_debug: bool | int | str = False,
98 | v1_case: KeyCase | str | None = MISSING,
99 | v1_field_to_alias: dict[str, str | Sequence[str]] = MISSING,
100 | v1_dump_date_time_as: V1DateTimeTo | str = MISSING,
101 | v1_assume_naive_datetime_tz: tzinfo | None = None,
102 | ) -> META:
103 | ...
104 |
105 |
106 | # noinspection PyPep8Naming
107 | def EnvMeta(*, debug_enabled: 'bool | int | str' = MISSING,
108 | env_file: EnvFileType = MISSING,
109 | env_prefix: str = MISSING,
110 | secrets_dir: 'EnvFileType | Sequence[EnvFileType]' = MISSING,
111 | field_to_env_var: dict[str, str] = MISSING,
112 | key_lookup_with_load: LetterCasePriority | str = LetterCasePriority.SCREAMING_SNAKE,
113 | key_transform_with_dump: LetterCase | str = LetterCase.SNAKE,
114 | # marshal_date_time_as: DateTimeTo | str = MISSING,
115 | skip_defaults: bool = MISSING,
116 | skip_if: Condition = MISSING,
117 | skip_defaults_if: Condition = MISSING,
118 | ) -> META:
119 | ...
120 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .PHONY: clean clean-test clean-pyc clean-build docs help bump-patch bump-minor bump-major bump-patch-dry
2 | .DEFAULT_GOAL := help
3 |
4 | define BROWSER_PYSCRIPT
5 | import os, webbrowser, sys
6 |
7 | from urllib.request import pathname2url
8 |
9 | webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1])))
10 | endef
11 | export BROWSER_PYSCRIPT
12 |
13 | define PRINT_HELP_PYSCRIPT
14 | import re, sys
15 |
16 | for line in sys.stdin:
17 | match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line)
18 | if match:
19 | target, help = match.groups()
20 | print("%-20s %s" % (target, help))
21 | endef
22 | export PRINT_HELP_PYSCRIPT
23 |
24 | BROWSER := python -c "$$BROWSER_PYSCRIPT"
25 |
26 | help:
27 | @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST)
28 |
29 | init: ## install all dev dependencies for this project
30 | pip install -e .[dev]
31 | python -m pip install pre-commit build twine
32 | pre-commit install
33 |
34 | bump-patch: ## bump the patch version using bump-my-version
35 | bump-my-version bump patch
36 |
37 | bump-minor: ## bump the minor version using bump-my-version
38 | bump-my-version bump minor
39 |
40 | bump-major: ## bump the major version using bump-my-version
41 | bump-my-version bump major
42 |
43 | bump-patch-dry: ## preview a patch version bump (dry run, verbose)
44 | bump-my-version bump patch --dry-run --verbose
45 |
46 | clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts
47 |
48 | clean-build: ## remove build artifacts
49 | rm -fr build/
50 | rm -fr dist/
51 | rm -fr .eggs/
52 | find . -name '*.egg-info' -exec rm -fr {} +
53 | find . -name '*.egg' -type f -exec rm -f {} +
54 |
55 | clean-pyc: ## remove Python file artifacts
56 | find . -name '*.pyc' -exec rm -f {} +
57 | find . -name '*.pyo' -exec rm -f {} +
58 | find . -name '*~' -exec rm -f {} +
59 | find . -name '__pycache__' -exec rm -fr {} +
60 |
61 | clean-test: ## remove test and coverage artifacts
62 | rm -fr .tox/
63 | rm -f .coverage
64 | rm -fr htmlcov/
65 | rm -fr .pytest_cache
66 |
67 | lint: ## check style with flake8 and pylint
68 | flake8 dataclass_wizard tests
69 | pylint dataclass_wizard tests
70 |
71 | test: ## run unit tests quickly with the default Python
72 | pytest -v --cov=dataclass_wizard --cov-report=term-missing tests/unit
73 |
74 | test-vb: ## run unit tests (in verbose mode) with the default Python
75 | pytest -vvv --log-cli-level=DEBUG --capture=tee-sys --cov=dataclass_wizard --cov-report=term-missing tests/unit
76 |
77 | test-all: ## run tests on every Python version with tox
78 | tox
79 |
80 | coverage: ## check code coverage with unit tests quickly with the default Python
81 | coverage run --source dataclass_wizard -m pytest tests/unit
82 | coverage report -m
83 | coverage html
84 | $(BROWSER) htmlcov/index.html
85 |
86 | docs: ## generate Sphinx HTML documentation, including API docs
87 | rm -f docs/dataclass_wizard.rst
88 | rm -f docs/modules.rst
89 | sphinx-apidoc -o docs/ dataclass_wizard
90 | $(MAKE) -C docs clean
91 | $(MAKE) -C docs html
92 | $(BROWSER) docs/_build/html/index.html
93 |
94 | servedocs: docs ## compile the docs watching for changes
95 | watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D .
96 |
97 | release: dist ## package and upload a release
98 | twine upload dist/*
99 |
100 | check-dist: ## build the package and verify the distribution with twine
101 | python -m build
102 | python -m twine check dist/*
103 |
104 | check: dist-local ## verify release before upload to PyPI
105 | twine check dist/*
106 |
107 | dist: clean ## builds source and wheel package
108 | pip install build
109 | python -m build
110 | ls -l dist
111 |
112 | dist-local: clean replace_version ## builds source and wheel package (for local testing)
113 | pip install build
114 | python -m build
115 | ls -l dist
116 | $(MAKE) revert_readme
117 |
118 | replace_version: ## replace |version| in README.rst with the current version
119 | cp README.rst README.rst.bak
120 | python -c "import re; \
121 | from pathlib import Path; \
122 | version = re.search(r\"__version__\\s*=\\s*'(.+?)'\", Path('dataclass_wizard/__version__.py').read_text()).group(1); \
123 | readme_path = Path('README.rst'); \
124 | readme_content = readme_path.read_text(); \
125 | readme_path.write_text(readme_content.replace('|version|', version)); \
126 | print(f'Replaced version in {readme_path}: {version}')"
127 |
128 | revert_readme: ## revert README.rst to its original state
129 | mv README.rst.bak README.rst
130 |
131 | install: clean ## install the package to the active Python's site-packages
132 | pip install .
133 |
134 | dist-conda: clean ## builds source and wheel package for Anaconda
135 | conda build .
136 |
137 | release-conda: dist-conda ## package and upload a release to Anaconda
138 | $(eval DIST_FILE=$(shell conda build . --output))
139 | anaconda upload $(DIST_FILE)
140 |
--------------------------------------------------------------------------------
/dataclass_wizard/utils/dict_helper.py:
--------------------------------------------------------------------------------
1 | """
2 | Dict helper module
3 | """
4 |
5 |
6 | class NestedDict(dict):
7 | """
8 | A dictionary that automatically creates nested dictionaries for missing keys.
9 |
10 | This class extends the built-in `dict` to simplify working with deeply nested structures.
11 | If a key is accessed but does not exist, it will be created automatically with a new `NestedDict` as its value.
12 |
13 | Source: https://stackoverflow.com/a/5369984/10237506
14 |
15 | Example:
16 | >>> nd = NestedDict()
17 | >>> nd['a']['b']['c'] = 42
18 | >>> nd
19 | {'a': {'b': {'c': 42}}}
20 |
21 | >>> nd['x']['y']
22 | {}
23 | """
24 |
25 | __slots__ = ()
26 |
27 | def __getitem__(self, key):
28 | """
29 | Retrieve the value for a key, or create a nested dictionary for missing keys.
30 |
31 | Args:
32 | key (Hashable): The key to retrieve or create.
33 |
34 | Returns:
35 | Any: The value associated with the key, or a new `NestedDict` for missing keys.
36 |
37 | Example:
38 | >>> nd = NestedDict()
39 | >>> nd['foo'] # Creates a new NestedDict for 'foo'
40 | {}
41 |
42 | Note:
43 | If the key exists, its value is returned. Otherwise, a new `NestedDict` is created,
44 | stored, and returned.
45 | """
46 | if key in self: return self.get(key)
47 | return self.setdefault(key, NestedDict())
48 |
49 |
50 | class DictWithLowerStore(dict):
51 | """
52 | A ``dict``-like object with a lower-cased key store.
53 |
54 |     All keys are expected to be strings. The structure remembers the
55 |     original case of the last key to be set, and methods like ``get()``
56 |     and ``get_key()`` will use the lower-cased store. However, indexing
57 |     and membership testing are case-sensitive::
58 |
59 | dls = DictWithLowerStore()
60 | dls['Accept'] = 'application/json'
61 | dls['aCCEPT'] == 'application/json' # False (raises KeyError)
62 | dls['Accept'] == 'application/json' # True
63 | dls.get('aCCEPT') == 'application/json' # True
64 |
65 | dls.get_key('aCCEPT') == 'Accept' # True
66 | list(dls) == ['Accept'] # True
67 |
68 | .. NOTE::
69 | I don't want to use the `CaseInsensitiveDict` from
70 |         `requests.structures`, because it turns out the lookup via that dict
71 | implementation is rather slow. So this version is somewhat of a
72 | trade-off, where I retain the same speed on lookups as a plain `dict`,
73 | but I also have a lower-cased key store, in case I ever need to use it.
74 |
75 | """
76 | __slots__ = ('_lower_store', )
77 |
78 | def __init__(self, data=None, **kwargs):
79 | super().__init__()
80 | self._lower_store = {}
81 | if data is None:
82 | data = {}
83 | self.update(data, **kwargs)
84 |
85 | def __setitem__(self, key, value):
86 | super().__setitem__(key, value)
87 | # Store the lower-cased key for lookups via `get`. Also store the
88 | # actual key alongside the value.
89 | self._lower_store[key.lower()] = (key, value)
90 |
91 | def get_key(self, key) -> str:
92 | """Return the original cased key"""
93 | return self._lower_store[key.lower()][0]
94 |
95 | def get(self, key):
96 | """
97 |         Do a case-insensitive lookup. This first tries an exact lookup, then
98 |         falls back to lower-casing `key` and looking it up in the lower-cased key store.
99 | """
100 | try:
101 | return self.__getitem__(key)
102 | except KeyError:
103 | return self._lower_store[key.lower()][1]
104 |
105 | def __delitem__(self, key):
106 | lower_key = key.lower()
107 | actual_key, _ = self._lower_store[lower_key]
108 |
109 | del self[actual_key]
110 | del self._lower_store[lower_key]
111 |
112 | def lower_items(self):
113 | """Like iteritems(), but with all lowercase keys."""
114 | return (
115 | (lowerkey, keyval[1])
116 | for (lowerkey, keyval)
117 | in self._lower_store.items()
118 | )
119 |
120 | def __eq__(self, other):
121 | if isinstance(other, dict):
122 | other = DictWithLowerStore(other)
123 | else:
124 | return NotImplemented
125 | # Compare insensitively
126 | return dict(self.lower_items()) == dict(other.lower_items())
127 |
128 | def update(self, *args, **kwargs):
129 | if len(args) > 1:
130 | raise TypeError("update expected at most 1 arguments, got %d" % len(args))
131 | other = dict(*args, **kwargs)
132 | for key in other:
133 | self[key] = other[key]
134 |
135 | def copy(self):
136 | return DictWithLowerStore(self._lower_store.values())
137 |
138 | def __repr__(self):
139 | return str(dict(self.items()))
140 |
--------------------------------------------------------------------------------
/tests/unit/v1/test_hooks.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import pytest
4 |
5 | from dataclasses import dataclass
6 | from ipaddress import IPv4Address
7 |
8 | from dataclass_wizard import register_type, JSONWizard, LoadMeta, fromdict, asdict
9 | from dataclass_wizard.errors import ParseError
10 | from dataclass_wizard.v1 import DumpMixin, LoadMixin
11 | from dataclass_wizard.v1.models import TypeInfo, Extras
12 |
13 |
14 | def test_v1_register_type_ipv4address_roundtrip():
15 |
16 | @dataclass
17 | class Foo(JSONWizard):
18 | class Meta(JSONWizard.Meta):
19 | v1 = True
20 |
21 | b: bytes = b""
22 | s: str | None = None
23 | c: IPv4Address | None = None
24 |
25 | Foo.register_type(IPv4Address)
26 |
27 | data = {"b": "AAAA", "c": "127.0.0.1", "s": "foobar"}
28 |
29 | foo = Foo.from_dict(data)
30 | assert foo.c == IPv4Address("127.0.0.1")
31 |
32 | assert foo.to_dict() == data
33 | assert Foo.from_dict(foo.to_dict()).to_dict() == data
34 |
35 |
36 | def test_v1_ipv4address_without_hook_raises_parse_error():
37 |
38 | @dataclass
39 | class Foo(JSONWizard):
40 | class Meta(JSONWizard.Meta):
41 | v1 = True
42 |
43 | c: IPv4Address | None = None
44 |
45 | data = {"c": "127.0.0.1"}
46 |
47 | with pytest.raises(ParseError) as e:
48 | Foo.from_dict(data)
49 |
50 | assert e.value.phase == 'load'
51 |
52 | msg = str(e.value)
53 | assert "field `c`" in msg
54 | assert "not currently supported" in msg
55 | assert "IPv4Address" in msg
56 | assert "load" in msg.lower()
57 |
58 |
59 | def test_v1_meta_codegen_hooks_ipv4address_roundtrip():
60 |
61 | def load_to_ipv4_address(tp: TypeInfo, extras: Extras) -> str:
62 | return tp.wrap(tp.v(), extras)
63 |
64 | def dump_from_ipv4_address(tp: TypeInfo, extras: Extras) -> str:
65 | return f"str({tp.v()})"
66 |
67 | @dataclass
68 | class Foo(JSONWizard):
69 | class Meta(JSONWizard.Meta):
70 | v1 = True
71 | v1_type_to_load_hook = {IPv4Address: load_to_ipv4_address}
72 | v1_type_to_dump_hook = {IPv4Address: dump_from_ipv4_address}
73 |
74 | b: bytes = b""
75 | s: str | None = None
76 | c: IPv4Address | None = None
77 |
78 | data = {"b": "AAAA", "c": "127.0.0.1", "s": "foobar"}
79 |
80 | foo = Foo.from_dict(data)
81 | assert foo.c == IPv4Address("127.0.0.1")
82 |
83 | assert foo.to_dict() == data
84 | assert Foo.from_dict(foo.to_dict()).to_dict() == data
85 |
86 |
87 | def test_v1_meta_runtime_hooks_ipv4address_roundtrip():
88 |
89 | @dataclass
90 | class Foo(JSONWizard):
91 | class Meta(JSONWizard.Meta):
92 | v1 = True
93 | v1_type_to_load_hook = {IPv4Address: ('runtime', IPv4Address)}
94 | v1_type_to_dump_hook = {IPv4Address: ('runtime', str)}
95 |
96 | b: bytes = b""
97 | s: str | None = None
98 | c: IPv4Address | None = None
99 |
100 | data = {"b": "AAAA", "c": "127.0.0.1", "s": "foobar"}
101 |
102 | foo = Foo.from_dict(data)
103 | assert foo.c == IPv4Address("127.0.0.1")
104 |
105 | assert foo.to_dict() == data
106 | assert Foo.from_dict(foo.to_dict()).to_dict() == data
107 |
108 | # invalid modes should raise an error
109 | with pytest.raises(ValueError) as e:
110 | meta = LoadMeta(v1_type_to_load_hook={IPv4Address: ('RT', str)})
111 | meta.bind_to(Foo)
112 | assert "mode must be 'runtime' or 'v1_codegen' (got 'RT')" in str(e.value)
113 |
114 |
115 | def test_v1_register_type_no_inheritance_with_functional_api_roundtrip():
116 | @dataclass
117 | class Foo:
118 | b: bytes = b""
119 | s: str | None = None
120 | c: IPv4Address | None = None
121 |
122 | LoadMeta(v1=True).bind_to(Foo)
123 |
124 | register_type(Foo, IPv4Address)
125 |
126 | data = {"b": "AAAA", "c": "127.0.0.1", "s": "foobar"}
127 |
128 | foo = fromdict(Foo, data)
129 | assert foo.c == IPv4Address("127.0.0.1")
130 |
131 | assert asdict(foo) == data
132 | assert asdict(fromdict(Foo, asdict(foo))) == data
133 |
134 |
135 | def test_v1_ipv4address_hooks_with_load_and_dump_mixins_roundtrip():
136 | @dataclass
137 | class Foo(JSONWizard, DumpMixin, LoadMixin):
138 | class Meta(JSONWizard.Meta):
139 | v1 = True
140 |
141 | c: IPv4Address | None = None
142 |
143 | @classmethod
144 | def load_to_ipv4_address(cls, tp: TypeInfo, extras: Extras) -> str:
145 | return tp.wrap(tp.v(), extras)
146 |
147 | @classmethod
148 | def dump_from_ipv4_address(cls, tp: TypeInfo, extras: Extras) -> str:
149 | return f"str({tp.v()})"
150 |
151 | Foo.register_load_hook(IPv4Address, Foo.load_to_ipv4_address)
152 | Foo.register_dump_hook(IPv4Address, Foo.dump_from_ipv4_address)
153 |
154 | data = {"c": "127.0.0.1"}
155 |
156 | foo = Foo.from_dict(data)
157 | assert foo.c == IPv4Address("127.0.0.1")
158 |
159 | assert foo.to_dict() == data
160 | assert Foo.from_dict(foo.to_dict()).to_dict() == data
161 |
--------------------------------------------------------------------------------
/dataclass_wizard/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Dataclass Wizard
3 | ~~~~~~~~~~~~~~~~
4 |
5 | Lightning-fast JSON wizardry for Python dataclasses — effortless
6 | serialization right out of the box!
7 |
8 | Sample Usage:
9 |
10 | >>> from dataclasses import dataclass, field
11 | >>> from datetime import datetime
12 | >>> from typing import Optional
13 | >>>
14 | >>> from dataclass_wizard import JSONSerializable, property_wizard
15 | >>>
16 | >>>
17 | >>> @dataclass
18 | >>> class MyClass(JSONSerializable, metaclass=property_wizard):
19 | >>>
20 | >>> my_str: Optional[str]
21 | >>> list_of_int: list[int] = field(default_factory=list)
22 | >>> # You can also define this as `my_dt`, however only the annotation
23 | >>> # will carry over in that case, since the value is re-declared by
24 | >>> # the property below.
25 | >>> _my_dt: datetime = datetime(2000, 1, 1)
26 | >>>
27 | >>> @property
28 | >>> def my_dt(self):
29 | >>> # A sample `getter` which returns the datetime with year set as 2010
30 | >>> if self._my_dt is not None:
31 | >>> return self._my_dt.replace(year=2010)
32 | >>> return self._my_dt
33 | >>>
34 | >>> @my_dt.setter
35 | >>> def my_dt(self, new_dt: datetime):
36 | >>> # A sample `setter` which sets the inverse (roughly) of the `month` and `day`
37 | >>> self._my_dt = new_dt.replace(month=13 - new_dt.month,
38 | >>> day=30 - new_dt.day)
39 | >>>
40 | >>>
41 | >>> string = '''{"myStr": 42, "listOFInt": [1, "2", 3]}'''
42 | >>> c = MyClass.from_json(string)
43 | >>> print(repr(c))
44 | >>> # prints:
45 | >>> # MyClass(
46 | >>> # my_str='42',
47 | >>> # list_of_int=[1, 2, 3],
48 | >>> # my_dt=datetime.datetime(2010, 12, 29, 0, 0)
49 | >>> # )
50 | >>> my_dict = {'My_Str': 'string', 'myDT': '2021-01-20T15:55:30Z'}
51 | >>> c = MyClass.from_dict(my_dict)
52 | >>> print(repr(c))
53 | >>> # prints:
54 | >>> # MyClass(
55 | >>> # my_str='string',
56 | >>> # list_of_int=[],
57 | >>> # my_dt=datetime.datetime(2010, 12, 10, 15, 55, 30,
58 | >>> # tzinfo=datetime.timezone.utc)
59 | >>> # )
60 | >>> print(c.to_json())
61 | >>> # prints:
62 | >>> # {"myStr": "string", "listOfInt": [], "myDt": "2010-12-10T15:55:30Z"}
63 |
64 | For full documentation and more advanced usage, please see
65 | <https://dataclass-wizard.readthedocs.io>.
66 |
67 | :copyright: (c) 2021-2025 by Ritvik Nag.
68 | :license: Apache 2.0, see LICENSE for more details.
69 | """
70 |
71 | __all__ = [
72 | # Base exports
73 | 'DataclassWizard',
74 | 'JSONSerializable',
75 | 'JSONPyWizard',
76 | 'JSONWizard',
77 | 'register_type',
78 | 'LoadMixin',
79 | 'DumpMixin',
80 | 'property_wizard',
81 | # Wizard Mixins
82 | 'EnvWizard',
83 | 'JSONListWizard',
84 | 'JSONFileWizard',
85 | 'TOMLWizard',
86 | 'YAMLWizard',
87 | # Helper serializer functions + meta config
88 | 'fromlist',
89 | 'fromdict',
90 | 'asdict',
91 | 'LoadMeta',
92 | 'DumpMeta',
93 | 'EnvMeta',
94 | # Models
95 | 'env_field',
96 | 'json_field',
97 | 'json_key',
98 | 'path_field',
99 | 'skip_if_field',
100 | 'KeyPath',
101 | 'Container',
102 | 'Pattern',
103 | 'DatePattern',
104 | 'TimePattern',
105 | 'DateTimePattern',
106 | 'CatchAll',
107 | 'SkipIf',
108 | 'SkipIfNone',
109 | 'EQ',
110 | 'NE',
111 | 'LT',
112 | 'LE',
113 | 'GT',
114 | 'GE',
115 | 'IS',
116 | 'IS_NOT',
117 | 'IS_TRUTHY',
118 | 'IS_FALSY',
119 | ]
120 |
121 | import logging
122 |
123 | from .bases_meta import LoadMeta, DumpMeta, EnvMeta, register_type
124 | from .constants import PACKAGE_NAME
125 | from .dumpers import DumpMixin, setup_default_dumper
126 | from .loaders import LoadMixin, setup_default_loader
127 | from .loader_selection import asdict, fromlist, fromdict
128 | from .models import (env_field, json_field, json_key, path_field, skip_if_field,
129 | KeyPath, Container,
130 | Pattern, DatePattern, TimePattern, DateTimePattern,
131 | CatchAll, SkipIf, SkipIfNone,
132 | EQ, NE, LT, LE, GT, GE, IS, IS_NOT, IS_TRUTHY, IS_FALSY)
133 | from .environ.wizard import EnvWizard
134 | from .property_wizard import property_wizard
135 | from .serial_json import DataclassWizard, JSONWizard, JSONPyWizard, JSONSerializable
136 | from .wizard_mixins import JSONListWizard, JSONFileWizard, TOMLWizard, YAMLWizard
137 |
138 |
139 | # Set up logging to ``/dev/null`` like a library is supposed to.
140 | # http://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library
141 | logging.getLogger(PACKAGE_NAME).addHandler(logging.NullHandler())
142 |
143 | # Setup the default type hooks to use when converting `str` (json) or a Python
144 | # `dict` object to a `dataclass` instance.
145 | setup_default_loader()
146 |
147 | # Setup the default type hooks to use when converting `dataclass` instances to
148 | # a JSON `string` or a Python `dict` object.
149 | setup_default_dumper()
150 |
--------------------------------------------------------------------------------
/docs/common_use_cases/handling_unknown_json_keys.rst:
--------------------------------------------------------------------------------
1 | Handling Unknown JSON Keys
2 | ###########################
3 |
4 | When working with JSON data, you may encounter unknown or extraneous keys -- those that do not map to any defined dataclass fields.
5 | This guide explains the default behavior, how to raise errors for unknown keys, and how to capture them using a ``CatchAll`` field.
6 |
7 | Default Behavior
8 | ================
9 |
10 | By default, when unknown JSON keys are encountered during the de-serialization process
11 | (using ``from_dict`` or ``from_json``), the library emits a warning if *debug* mode is enabled
12 | and logging is properly configured. These keys are ignored and not included in the resulting object.
13 |
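As a quick illustration, here is a minimal sketch of this default behavior (the ``Item`` class
and input data below are made up purely for this example); the unmapped key is simply dropped:

.. code:: python3

    from dataclasses import dataclass

    from dataclass_wizard import JSONWizard


    @dataclass
    class Item(JSONWizard):
        name: str


    # 'color' does not map to any dataclass field, so by default it is
    # ignored and does not appear on the resulting instance.
    item = Item.from_dict({'name': 'widget', 'color': 'red'})

    print(item)
    # > Item(name='widget')
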
14 | However, you can customize this behavior to raise an error or capture unknown data.
15 |
16 | Raising Errors on Unknown Keys
17 | ==============================
18 |
19 | To enforce strict validation, you can configure the library to raise an error when
20 | unknown keys are encountered. This is useful when you need to ensure that all JSON
21 | data adheres to a specific schema.
22 |
23 | Example: Raising an Error
24 | --------------------------
25 |
26 | The example below demonstrates how to configure the library to raise a
27 | ``UnknownJSONKey`` error when unknown keys are encountered.
28 |
29 | .. code:: python3
30 |
31 | import logging
32 | from dataclasses import dataclass
33 |
34 | from dataclass_wizard import JSONWizard
35 | from dataclass_wizard.errors import UnknownJSONKey
36 |
37 | # Sets up application logging if we haven't already done so
38 | logging.basicConfig(level='INFO')
39 |
40 |
41 | @dataclass
42 | class Container(JSONWizard):
43 | class _(JSONWizard.Meta):
44 | debug_enabled = 'INFO'
45 | raise_on_unknown_json_key = True
46 |
47 | element: 'MyElement'
48 |
49 |
50 | @dataclass
51 | class MyElement:
52 | my_str: str
53 | my_float: float
54 |
55 |
56 | d = {
57 | 'element': {
58 | 'myStr': 'string',
59 | 'my_float': '1.23',
60 | 'my_bool': 'Testing' # This key is not mapped to a known dataclass field!
61 | }
62 | }
63 |
64 | try:
65 | c = Container.from_dict(d)
66 | except UnknownJSONKey as e:
67 | print('Error:', e)
68 |
69 | # Expected Output:
70 | # > Error: A JSON key is missing from the dataclass schema for class `MyElement`.
71 | # unknown key: 'my_bool'
72 | # dataclass fields: ['my_str', 'my_float']
73 | # input JSON object: {"myStr": "string", "my_float": "1.23", "my_bool": "Testing"}
74 |
75 | Capturing Unknown Keys with ``CatchAll``
76 | ========================================
77 |
78 | Starting from version **v0.29**, unknown JSON keys can be captured into a designated field
79 | using the ``CatchAll`` type. This allows you to store all unmapped key-value pairs for
80 | later use, without discarding them.
81 |
82 | Example: Capturing Unknown Keys
83 | -------------------------------
84 |
85 | The following example demonstrates how to use a ``CatchAll`` field to capture
86 | unknown JSON keys during de-serialization.
87 |
88 | .. code:: python
89 |
90 | from dataclasses import dataclass
91 | from dataclass_wizard import CatchAll, JSONWizard
92 |
93 |
94 | @dataclass
95 | class MyData(JSONWizard):
96 | class _(JSONWizard.Meta):
97 | skip_defaults = True
98 |
99 | my_str: str
100 | my_float: float
101 | extra_data: CatchAll = False # Initialize with a default value.
102 |
103 |
104 | # Case 1: JSON object with extra data
105 | input_dict = {
106 | 'my_str': "test",
107 | 'my_float': 3.14,
108 | 'my_other_str': "test!",
109 | 'my_bool': True
110 | }
111 |
112 | data = MyData.from_dict(input_dict)
113 |
114 | print(data.extra_data)
115 | # > {'my_other_str': 'test!', 'my_bool': True}
116 |
117 | # Save back to JSON
118 | output_dict = data.to_dict()
119 |
120 | print(output_dict)
121 | # > {'myStr': 'test', 'myFloat': 3.14, 'my_other_str': 'test!', 'my_bool': True}
122 |
123 | # Case 2: JSON object without extra data
124 | input_dict = {
125 | 'my_str': "test",
126 | 'my_float': 3.14,
127 | }
128 |
129 | data = MyData.from_dict(input_dict)
130 |
131 | print(data.extra_data)
132 | # > False
133 |
134 | Key Points:
135 | -----------
136 |
137 | - The ``extra_data`` field automatically captures all unknown JSON keys.
138 | - If no extra data is present, the field defaults to ``False`` in this example.
139 | - When serialized back to JSON, the extra data is retained.
140 |
141 | Best Practices
142 | ==============
143 |
144 | - Use ``raise_on_unknown_json_key`` when strict validation of JSON data is required.
145 | - Use ``CatchAll`` to gracefully handle dynamic or extensible JSON data structures.
146 | - Combine both features for advanced use cases, such as logging unknown keys
147 | while capturing them into a designated field.
148 |
149 | ----
150 |
151 | This guide offers a comprehensive overview of handling unknown JSON keys.
152 | By customizing the behavior, you can ensure your application works seamlessly
153 | with various JSON structures, whether strict or dynamic.
154 |
--------------------------------------------------------------------------------
/docs/python_compatibility.rst:
--------------------------------------------------------------------------------
1 | .. highlight:: shell
2 |
3 | ================
4 | Py Compatibility
5 | ================
6 |
7 | Python 3.9+
8 | -----------
9 |
10 | Just a quick note that even though this library supports Python 3.9+,
11 | some of the new features introduced in the latest Python
12 | versions might not be available from the ``typing`` module, depending on
13 | the Python version installed.
14 |
15 | To work around that, there's a great library called ``typing-extensions`` (you can
16 | find it on PyPI `here`_) that backports all the new
17 | ``typing`` features introduced so that earlier Python versions can also
18 | benefit from them. Note that the ``dataclass-wizard`` package already requires
19 | this dependency for **Python version 3.10 or earlier**, so there's no need
20 | to install this library separately.
21 |
22 | With the ``typing-extensions`` module, you can take advantage of the
23 | following new types from the ``typing`` module for Python 3.9+. Most of them are currently
24 | supported by the ``JSONSerializable`` class; however, the ones that are *not*
25 | are marked with an asterisk (``*``) below.
26 |
27 | Introduced in *Python 3.10*:
28 | * `is_typeddict`_
29 | * `Concatenate`_
30 | * `ParamSpec`_
31 | * `TypeAlias`_
32 | * `TypeGuard`_
33 |
34 | Introduced in *Python 3.9*:
35 | * `Annotated`_ (added by `PEP 593`_)
36 |
37 | Introduced in *Python 3.8*:
38 | * `Literal`_
39 | * `TypedDict`_
40 | * `Final`_ ``*``
41 |
42 |
43 | ``*`` - Not currently supported by ``JSONSerializable``, though this
44 | may change in a future release.
45 |
46 | .. _here: https://pypi.org/project/typing-extensions/
47 | .. _Annotated: https://docs.python.org/3.9/library/typing.html#typing.Annotated
48 | .. _PEP 593: https://www.python.org/dev/peps/pep-0593/
49 | .. _Final: https://docs.python.org/3.8/library/typing.html#typing.Final
50 | .. _Literal: https://docs.python.org/3.8/library/typing.html#typing.Literal
51 | .. _TypedDict: https://docs.python.org/3.8/library/typing.html#typing.TypedDict
52 | .. _TypeAlias: https://docs.python.org/3/library/typing.html#typing.TypeAlias
53 | .. _Concatenate: https://docs.python.org/3/library/typing.html#typing.Concatenate
54 | .. _TypeGuard: https://docs.python.org/3/library/typing.html#typing.TypeGuard
55 | .. _ParamSpec: https://docs.python.org/3/library/typing.html#typing.ParamSpec
56 | .. _is_typeddict: https://docs.python.org/3/library/typing.html#typing.is_typeddict
57 |
58 | Importing the New Types
59 | ~~~~~~~~~~~~~~~~~~~~~~~
60 |
61 | You can import the new types (for example, the ones mentioned above) using the below
62 | syntax:
63 |
64 | .. code-block:: python3
65 |
66 | from typing_extensions import Literal, TypedDict, Annotated
67 |
68 |
69 | Python 3.7+
70 | -----------
71 |
72 | The Dataclass Wizard library supports the parsing of *future annotations* (also
73 | known as forward-declared annotations) which are enabled via a
74 | ``from __future__ import annotations`` import added at the top of a module; this
75 | declaration allows `PEP 585`_ and `PEP 604`_- style annotations to be used in
76 | Python 3.7 and higher. The main benefit is that static type checkers and
77 | IDEs such as PyCharm appear to have solid support for using new-style
78 | annotations in this way.
79 |
80 | The following Python code illustrates the paradigm of future annotations in
81 | Python 3.7+ code; notice that a ``__future__`` import is added at the top, for
82 | compatibility with versions earlier than 3.10. In the annotations, we also prefer
83 | to use parameterized standard collections, and the new pipe ``|`` syntax to
84 | represent ``Union`` and ``Optional`` types.
85 |
86 | .. code:: python3
87 |
88 | from __future__ import annotations
89 |
90 | import datetime
91 | from dataclasses import dataclass
92 | from decimal import Decimal
93 |
94 | from dataclass_wizard import JSONWizard
95 |
96 |
97 | @dataclass
98 | class A(JSONWizard):
99 | field_1: str | int | bool
100 | field_2: int | tuple[str | int] | bool
101 | field_3: Decimal | datetime.date | str
102 | field_4: str | int | None
103 | field_6: dict[str | int, list[B | C | D | None]]
104 |
105 |
106 | @dataclass
107 | class B:
108 | ...
109 |
110 |
111 | @dataclass
112 | class C:
113 | ...
114 |
115 |
116 | @dataclass
117 | class D:
118 | ...
119 |
120 | The Latest and Greatest
121 | -----------------------
122 |
123 | If you already have Python 3.10 or higher, you can leverage the new support for parameterized
124 | standard collections that was added as part of `PEP 585`_, as well as the ability to write
125 | Union types as ``X | Y`` which is introduced in `PEP 604`_, and avoid these imports from
126 | the ``typing`` module altogether:
127 |
128 | .. code:: python3
129 |
130 | from collections import defaultdict
131 | from dataclasses import dataclass
132 |
133 | from dataclass_wizard import JSONWizard
134 |
135 |
136 | @dataclass
137 | class MyClass(JSONWizard):
138 | my_list: list[str]
139 | my_dict: defaultdict[str, list[int]]
140 | my_tuple: tuple[int | str, ...]
141 |
142 |
143 | if __name__ == '__main__':
144 | data = {'my_list': ['testing'], 'my_dict': {'key': [1, 2, '3']}, 'my_tuple': (1, '2')}
145 |
146 | c = MyClass.from_dict(data)
147 |
148 | print(repr(c))
149 | # prints:
150 |     # MyClass(my_list=['testing'], my_dict=defaultdict(<class 'list'>, {'key': [1, 2, 3]}), my_tuple=(1, '2'))
151 |
152 |
153 | .. _PEP 585: https://www.python.org/dev/peps/pep-0585/
154 | .. _PEP 604: https://www.python.org/dev/peps/pep-0604/
155 |
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | .. highlight:: shell
2 |
3 | ============
4 | Contributing
5 | ============
6 |
7 | Contributions are welcome, and they are greatly appreciated! Every little bit
8 | helps, and credit will always be given.
9 |
10 | You can contribute in many ways:
11 |
12 | Types of Contributions
13 | ----------------------
14 |
15 | Report Bugs
16 | ~~~~~~~~~~~
17 |
18 | Report bugs at https://github.com/rnag/dataclass-wizard/issues.
19 |
20 | If you are reporting a bug, please include:
21 |
22 | * Your operating system name and version.
23 | * Any details about your local setup that might be helpful in troubleshooting.
24 | * Detailed steps to reproduce the bug.
25 |
26 | Fix Bugs
27 | ~~~~~~~~
28 |
29 | Look through the GitHub issues for bugs. Anything tagged with "bug" and "help
30 | wanted" is open to whoever wants to implement it.
31 |
32 | Implement Features
33 | ~~~~~~~~~~~~~~~~~~
34 |
35 | Look through the GitHub issues for features. Anything tagged with "enhancement"
36 | and "help wanted" is open to whoever wants to implement it.
37 |
38 | Write Documentation
39 | ~~~~~~~~~~~~~~~~~~~
40 |
41 | Dataclass Wizard could always use more documentation, whether as part of the
42 | official Dataclass Wizard docs, in docstrings, or even on the web in blog posts,
43 | articles, and such.
44 |
45 | Submit Feedback
46 | ~~~~~~~~~~~~~~~
47 |
48 | The best way to send feedback is to file an issue at https://github.com/rnag/dataclass-wizard/issues.
49 |
50 | If you are proposing a feature:
51 |
52 | * Explain in detail how it would work.
53 | * Keep the scope as narrow as possible, to make it easier to implement.
54 | * Remember that this is a volunteer-driven project, and that contributions
55 | are welcome :)
56 |
57 | Get Started!
58 | ------------
59 |
60 | Ready to contribute? Here's how to set up `dataclass-wizard` for local development.
61 |
62 | 1. Fork the `dataclass-wizard` repo on GitHub.
63 | 2. Clone your fork locally::
64 |
65 | $ git clone git@github.com:your_name_here/dataclass-wizard.git
66 |
67 | 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development::
68 |
69 | $ mkvirtualenv dataclass-wizard
70 | $ cd dataclass-wizard/
71 | $ make init
72 |
73 | 4. Create a branch for local development::
74 |
75 | $ git checkout -b name-of-your-bugfix-or-feature
76 |
77 | Now you can make your changes locally.
78 |
79 | 5. When you're done making changes, check that your changes pass flake8 and the
80 | tests, including testing other Python versions with tox::
81 |
82 | $ make lint
83 | $ make test # or: see debug output with `make test-vb`
84 | $ tox
85 |
86 | To get flake8 and tox, just pip install them into your virtualenv.
87 |
88 | To instead run pytest in verbose mode (``-vvv``) and also show
89 | log output in the terminal for debugging purposes, use::
90 |
91 | $ make test-vb
92 |
93 | 6. Commit your changes and push your branch to GitHub::
94 |
95 | $ git add .
96 | $ git commit -m "Your detailed description of your changes."
97 | $ git push origin name-of-your-bugfix-or-feature
98 |
99 | 7. Submit a pull request through the GitHub website.
100 |
101 | Pre-commit Hooks
102 | ----------------
103 |
104 | This project uses ``pre-commit`` to catch formatting, packaging, and
105 | documentation issues *before* they reach CI or PyPI.
106 |
107 | Setup (one-time)
108 | ~~~~~~~~~~~~~~~~
109 |
110 | Install the required tools and enable the git hooks:
111 |
112 | .. code-block:: bash
113 |
114 | python -m pip install pre-commit build twine
115 | pre-commit install
116 |
117 | After this, the hooks will run automatically on every commit.
118 |
119 | Running manually
120 | ~~~~~~~~~~~~~~~~
121 |
122 | To run all hooks against the entire repository (recommended after
123 | changing documentation or release metadata):
124 |
125 | .. code-block:: bash
126 |
127 | pre-commit run --all-files
128 |
129 | Pull Request Guidelines
130 | -----------------------
131 |
132 | Before you submit a pull request, check that it meets these guidelines:
133 |
134 | 1. The pull request should include tests.
135 | 2. If the pull request adds functionality, the docs should be updated. Put
136 | your new functionality into a function with a docstring, and add the
137 | feature to the list in README.rst.
138 | 3. The pull request should work for Python 3.9, 3.10, 3.11, 3.12, 3.13 and 3.14, and for PyPy. Check
139 | https://github.com/rnag/dataclass-wizard/actions/workflows/dev.yml
140 | and make sure that the tests pass for all supported Python versions.
141 |
142 | Tips
143 | ----
144 |
145 | To run a subset of tests::
146 |
147 | $ pytest tests/unit/test_dataclass_wizard.py::test_my_func
148 |
149 |
150 | Deploying
151 | ---------
152 |
153 | .. note:: **Tip:** The last command below is used to push both the commit and
154 | the new tag to the remote branch simultaneously. There is also a simpler
155 | alternative as mentioned in `this post`_, which involves running the following
156 | command::
157 |
158 | $ git config --global push.followTags true
159 |
160 | After that, you should be able to simply run the below command to push *both
161 | the commits and tags* simultaneously::
162 |
163 | $ git push
164 |
165 | A reminder for the maintainers on how to deploy.
166 | Make sure all your changes are committed (including an entry in HISTORY.rst).
167 | Then run::
168 |
169 | $ bump2version patch # possible: major / minor / patch
170 | $ git push && git push --tags
171 |
172 | GitHub Actions will then `deploy to PyPI`_ if tests pass.
173 |
174 | .. _`deploy to PyPI`: https://github.com/rnag/dataclass-wizard/actions/workflows/release.yml
175 | .. _`this post`: https://stackoverflow.com/questions/3745135/push-git-commits-tags-simultaneously
176 |
--------------------------------------------------------------------------------
/dataclass_wizard/environ/loaders.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, date, timezone
2 | from typing import (
3 | Type, Dict, List, Tuple, Iterable, Sequence,
4 | Union, AnyStr, Optional, Callable,
5 | )
6 |
7 | from ..abstractions import AbstractParser
8 | from ..bases import META
9 | from ..decorators import _single_arg_alias
10 | from ..loaders import LoadMixin, load_func_for_dataclass
11 | from ..type_def import (
12 | FrozenKeys, DefFactory, M, N, U, DD, LSQ, NT, T, JSONObject
13 | )
14 | from ..utils.type_conv import (
15 | as_datetime, as_date, as_list, as_dict
16 | )
17 |
18 |
19 | class EnvLoader(LoadMixin):
20 | """
21 | This Mixin class derives its name from the eponymous `json.loads`
22 | function. Essentially it contains helper methods to convert JSON strings
23 | (or a Python dictionary object) to a `dataclass` which can often contain
24 | complex types such as lists, dicts, or even other dataclasses nested
25 | within it.
26 |
27 | Refer to the :class:`AbstractLoader` class for documentation on any of the
28 | implemented methods.
29 |
30 | """
31 | __slots__ = ()
32 |
33 | def __init_subclass__(cls, **kwargs):
34 | super().__init_subclass__()
35 |
36 | cls.register_load_hook(bytes, cls.load_to_bytes)
37 | cls.register_load_hook(bytearray, cls.load_to_byte_array)
38 |
39 | @staticmethod
40 | def load_to_bytes(
41 | o: AnyStr, base_type: Type[bytes], encoding='utf-8') -> bytes:
42 |
43 | return base_type(o, encoding)
44 |
45 | @staticmethod
46 | def load_to_byte_array(
47 | o: AnyStr, base_type: Type[bytearray],
48 | encoding='utf-8') -> bytearray:
49 |
50 | return base_type(o, encoding) if isinstance(o, str) else base_type(o)
51 |
52 | @staticmethod
53 | @_single_arg_alias('base_type')
54 | def load_to_uuid(o: Union[AnyStr, U], base_type: Type[U]) -> U:
55 | # alias: base_type(o)
56 | ...
57 |
58 | @staticmethod
59 | def load_to_iterable(
60 | o: Iterable, base_type: Type[LSQ],
61 | elem_parser: AbstractParser) -> LSQ:
62 |
63 | return super(EnvLoader, EnvLoader).load_to_iterable(
64 | as_list(o), base_type, elem_parser)
65 |
66 | @staticmethod
67 | def load_to_tuple(
68 | o: Union[List, Tuple], base_type: Type[Tuple],
69 | elem_parsers: Sequence[AbstractParser]) -> Tuple:
70 |
71 | return super(EnvLoader, EnvLoader).load_to_tuple(
72 | as_list(o), base_type, elem_parsers)
73 |
74 | @staticmethod
75 | def load_to_named_tuple(
76 | o: Union[Dict, List, Tuple], base_type: Type[NT],
77 | field_to_parser: 'FieldToParser',
78 | field_parsers: List[AbstractParser]) -> NT:
79 |
80 | # TODO check for both list and dict
81 |
82 | return super(EnvLoader, EnvLoader).load_to_named_tuple(
83 | as_list(o), base_type, field_to_parser, field_parsers)
84 |
85 | @staticmethod
86 | def load_to_named_tuple_untyped(
87 | o: Union[Dict, List, Tuple], base_type: Type[NT],
88 | dict_parser: AbstractParser, list_parser: AbstractParser) -> NT:
89 |
90 | return super(EnvLoader, EnvLoader).load_to_named_tuple_untyped(
91 | as_list(o), base_type, dict_parser, list_parser)
92 |
93 | @staticmethod
94 | def load_to_dict(
95 | o: Dict, base_type: Type[M],
96 | key_parser: AbstractParser,
97 | val_parser: AbstractParser) -> M:
98 |
99 | return super(EnvLoader, EnvLoader).load_to_dict(
100 | as_dict(o), base_type, key_parser, val_parser)
101 |
102 | @staticmethod
103 | def load_to_defaultdict(
104 | o: Dict, base_type: Type[DD],
105 | default_factory: DefFactory,
106 | key_parser: AbstractParser,
107 | val_parser: AbstractParser) -> DD:
108 |
109 | return super(EnvLoader, EnvLoader).load_to_defaultdict(
110 | as_dict(o), base_type, default_factory, key_parser, val_parser)
111 |
112 | @staticmethod
113 | def load_to_typed_dict(
114 | o: Dict, base_type: Type[M],
115 | key_to_parser: 'FieldToParser',
116 | required_keys: FrozenKeys,
117 | optional_keys: FrozenKeys) -> M:
118 |
119 | return super(EnvLoader, EnvLoader).load_to_typed_dict(
120 | as_dict(o), base_type, key_to_parser, required_keys, optional_keys)
121 |
122 | @staticmethod
123 | def load_to_datetime(
124 | o: Union[str, N], base_type: Type[datetime]) -> datetime:
125 | if isinstance(o, str):
126 | # Check if it's a string in numeric format, like '1.23'
127 | if o.replace('.', '', 1).isdigit():
128 | return base_type.fromtimestamp(float(o), tz=timezone.utc)
129 |
130 | return base_type.fromisoformat(o.replace('Z', '+00:00', 1))
131 |
132 | # default: as_datetime
133 | return as_datetime(o, base_type)
134 |
135 | @staticmethod
136 | def load_to_date(o: Union[str, N], base_type: Type[date]) -> date:
137 | if isinstance(o, str):
138 | # Check if it's a string in numeric format, like '1.23'
139 | if o.replace('.', '', 1).isdigit():
140 | return base_type.fromtimestamp(float(o))
141 |
142 | return base_type.fromisoformat(o)
143 |
144 | # default: as_date
145 | return as_date(o, base_type)
146 |
147 | @staticmethod
148 | def load_func_for_dataclass(
149 | cls: Type[T],
150 | config: Optional[META],
151 | is_main_class: bool = False,
152 | ) -> Callable[['str | JSONObject | T', Type[T]], T]:
153 |
154 | load = load_func_for_dataclass(
155 | cls,
156 | is_main_class=False,
157 | config=config,
158 | # override the loader class
159 | loader_cls=EnvLoader,
160 | )
161 |
162 | def load_to_dataclass(o: 'str | JSONObject | T', *_) -> T:
163 | """
164 | Receives either a string or a `dict` as an input, and returns a
165 | dataclass instance of type `cls`.
166 | """
167 | if type(o) is cls:
168 | return o
169 |
170 | return load(as_dict(o))
171 |
172 | return load_to_dataclass
173 |
--------------------------------------------------------------------------------
/docs/wiz_cli.rst:
--------------------------------------------------------------------------------
1 | .. highlight:: shell
2 |
3 | The CLI Tool
4 | ============
5 |
6 | The ``wiz`` command provides a companion CLI tool for the Dataclass Wizard,
7 | which further simplifies interaction with the Python ``dataclasses`` module.
8 |
9 | Getting help::
10 |
11 | $ wiz -h
12 | usage: wiz [-h] [-V] {gen-schema,gs} ...
13 |
14 | A companion CLI tool for the Dataclass Wizard, which simplifies interaction with the Python `dataclasses` module.
15 |
16 | positional arguments:
17 | {gen-schema,gs} Supported sub-commands
18 | gen-schema (gs)
19 | Generates a Python dataclass schema, given a JSON input.
20 |
21 | optional arguments:
22 | -h, --help show this help message and exit
23 | -V, --version Display the version of this tool.
24 |
25 | Checking the version of the CLI tool should display the currently installed
26 | version of the ``dataclass-wizard`` library::
27 |
28 | $ wiz -V
29 |
30 | To get help on a subcommand, simply use ``wiz <subcommand> -h``. For example::
31 |
32 | $ wiz gs -h
33 |
34 | JSON To Dataclass Generation Tool
35 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
36 |
37 | The subcommand ``gen-schema`` (aliased to ``gs``) provides a JSON to Python
38 | schema generation tool. This utility takes a JSON file or string as an input,
39 | and outputs the corresponding dataclass schema. The main purpose is to easily
40 | create dataclasses that can be used with API output, without resorting to
41 | ``dict`` or ``NamedTuple`` objects.
42 |
43 | This schema generation tool is inspired by the following projects:
44 |
45 | - https://github.com/mischareitsma/json2dataclass
46 | - https://russbiggs.github.io/json2dataclass
47 | - https://github.com/mholt/json-to-go
48 | - https://github.com/bermi/Python-Inflector
49 |
50 | .. note:: A few things to consider:
51 |
52 | - The script sometimes has to make some assumptions, so give the output a once-over.
53 | - In an array of objects (i.e. dictionaries), all key names and type definitions get merged into a single
54 | model ``dataclass``, as the objects are considered homogeneous in this case.
55 | - Deeply nested lists within objects (e.g. *list* -> *dict* -> *list*) should
56 | similarly merge all list elements with the other lists under that key in
57 | each sibling `dict` object.
58 | - The output is properly formatted, including additional spacing where needed.
59 | Please consider `opening an issue`_ if there are any potential improvements
60 | to be made.
61 |
62 | Example usage::
63 |
64 | echo '{
65 | "name": "Yavin IV",
66 | "rotation_period": "24",
67 | "orbital_period": "4818",
68 | "diameter": "10200",
69 | "climate": "temperate, tropical",
70 | "gravity": "1 standard",
71 | "terrain": "jungle, rainforests",
72 | "surface_water": "8",
73 | "population": "1000",
74 | "residents": [],
75 | "films": [
76 | "https://swapi.co/api/films/1/"
77 | ],
78 | "created": "2014-12-10T11:37:19.144000Z",
79 | "edited": "2014-12-20T20:58:18.421000Z",
80 | "url": "https://swapi.co/api/planets/3/"
81 | }' | wiz gs
82 |
83 | Generates the following Python code::
84 |
85 | from dataclasses import dataclass
86 | from datetime import datetime
87 | from typing import List, Union
88 |
89 |
90 | @dataclass
91 | class Data:
92 | """
93 | Data dataclass
94 |
95 | """
96 | name: str
97 | rotation_period: Union[int, str]
98 | orbital_period: Union[int, str]
99 | diameter: Union[int, str]
100 | climate: str
101 | gravity: str
102 | terrain: str
103 | surface_water: Union[int, str]
104 | population: Union[int, str]
105 | residents: List
106 | films: List[str]
107 | created: datetime
108 | edited: datetime
109 | url: str
110 |
111 |
112 | Note: to write the output to a Python file instead of displaying the
113 | output in the terminal, pass the name of the output file. If the file
114 | has no extension, a default ``.py`` extension will be added.
115 |
116 | For example::
117 |
118 | # Note: the following command writes to a new file 'out.py'
119 |
120 | echo '<json string>' | wiz gs - out
121 |
122 | Future Annotations
123 | ------------------
124 |
125 | Passing in the ``-x/--experimental`` flag will enable experimental features via
126 | a ``__future__`` import, which allows `PEP 585`_ and `PEP 604`_-style
127 | annotations to be used in Python 3.7+.
128 |
129 | For example, assume your ``input.json`` file contains the following contents:
130 |
131 | .. code:: json
132 |
133 | {
134 | "myField": null,
135 | "My_List": [],
136 | "Objects": [
137 | {
138 | "key1": false
139 | },
140 | {
141 | "key1": 1.2,
142 | "key2": "string"
143 | },
144 | {
145 | "key1": "val",
146 | "key2": null
147 | }
148 | ]
149 | }
150 |
151 | Then we could run the following command::
152 |
153 | $ wiz gs -x input.json
154 |
155 | The generated Python code is slightly different, as shown below. You might notice
156 | that a ``__future__`` import is added at the top, for compatibility with versions
157 | earlier than Python 3.10. In the annotations, we also prefer to use parameterized
158 | standard collections, and use the new pipe ``|`` syntax to represent ``Union``
159 | and ``Optional`` types.
160 |
161 | .. code:: python3
162 |
163 | from __future__ import annotations
164 |
165 | from dataclasses import dataclass
166 | from typing import Any
167 |
168 | from dataclass_wizard import JSONWizard
169 |
170 |
171 | @dataclass
172 | class Data(JSONWizard):
173 | """
174 | Data dataclass
175 |
176 | """
177 | my_field: Any
178 | my_list: list
179 | objects: list[Object]
180 |
181 |
182 | @dataclass
183 | class Object:
184 | """
185 | Object dataclass
186 |
187 | """
188 | key1: bool | float | str
189 | key2: str | None
190 |
191 |
192 | .. _`opening an issue`: https://github.com/rnag/dataclass-wizard/issues
193 | .. _`PEP 585`: https://www.python.org/dev/peps/pep-0585/
194 | .. _`PEP 604`: https://www.python.org/dev/peps/pep-0604/
195 |
--------------------------------------------------------------------------------
/docs/common_use_cases/nested_key_paths.rst:
--------------------------------------------------------------------------------
1 | Map a Nested JSON Key Path to a Field
2 | =====================================
3 |
4 | .. note::
5 | **Important:** The current "nested path" functionality is being re-imagined.
6 | Please refer to the new docs for **V1 Opt-in** features, which introduce enhanced support for these use
7 | cases. For more details, see the `Field Guide to V1 Opt‐in`_ and the `V1 Alias`_ documentation.
8 |
9 | This change is part of the ongoing improvements in version ``v0.35.0+``, and the old functionality will no longer be maintained in future releases.
10 |
11 | .. _Field Guide to V1 Opt‐in: https://github.com/rnag/dataclass-wizard/wiki/Field-Guide-to-V1-Opt%E2%80%90in
12 | .. _V1 Alias: https://dcw.ritviknag.com/en/latest/common_use_cases/v1_alias.html
13 |
14 | The ``dataclass-wizard`` library allows mapping deeply nested JSON paths to individual dataclass fields using a custom object path notation. This feature supports both :type:`Annotated` types and :class:`dataclasses.Field` for flexible and precise JSON deserialization.
15 |
16 | .. role:: bc
17 | :class: bold-code
18 |
19 | Basic Usage Example
20 | -------------------
21 |
22 | Define and use nested key paths for JSON deserialization with the :type:`Annotated` type and :func:`path_field`:
23 |
24 | .. code:: python3
25 |
26 | from dataclasses import dataclass
27 | from dataclass_wizard import JSONWizard, KeyPath, path_field
28 | from typing import Annotated
29 |
30 | @dataclass
31 | class Example(JSONWizard):
32 | # Map using Annotated with KeyPath
33 | an_int: Annotated[int, KeyPath('data.nested.int')]
34 | # Map using path_field with a default value
35 | my_str: str = path_field(['metadata', 'info', 'name'], default='unknown')
36 |
37 | - The field ``an_int`` maps to the nested JSON path ``data.nested.int``.
38 | - The field ``my_str`` maps to the path ``metadata.info.name`` and defaults to ``'unknown'`` if the key is missing.
39 |
40 | Expanded Example with JSON
41 | ---------------------------
42 |
43 | Given the following JSON data:
44 |
45 | .. code-block:: json
46 |
47 | {
48 | "data": {
49 | "nested": {
50 | "int": 42
51 | }
52 | },
53 | "metadata": {
54 | "info": {
55 | "name": "John Doe"
56 | }
57 | }
58 | }
59 |
60 | Deserializing with the :meth:`from_dict` method:
61 |
62 | .. code:: python3
63 |
64 | example = Example.from_dict({
65 | "data": {
66 | "nested": {
67 | "int": 42
68 | }
69 | },
70 | "metadata": {
71 | "info": {
72 | "name": "John Doe"
73 | }
74 | }
75 | })
76 | print(example.an_int) # 42
77 | print(example.my_str) # 'John Doe'
78 |
79 | This example shows how JSON data is mapped to dataclass fields using the custom key paths.
80 |
81 | Object Path Notation
82 | --------------------
83 |
84 | The object path notation used in :func:`KeyPath` and :func:`path_field` follows these rules:
85 |
86 | - **Dot** (:bc:`.`) separates nested object keys.
87 | - **Square brackets** (:bc:`[]`) access array elements or special keys.
88 | - **Quotes** (:bc:`"` or :bc:`'`) are required for keys with spaces, special characters, or reserved names.
89 |
90 | .. |dot| raw:: html
91 |
92 | .
93 |
94 | Examples:
95 |
96 | 1. **Simple Path**
97 | ``data.info.name``
98 | Accesses the ``name`` key inside the ``info`` object within ``data``.
99 |
100 | 2. **Array Indexing**
101 | ``data[0].value``
102 | Accesses the ``value`` field in the first element of the ``data`` array.
103 |
104 | 3. **Keys with Spaces or Special Characters**
105 | ``metadata["user name"].details``
106 | Accesses the ``details`` key inside ``metadata["user name"]``.
107 |
108 | 4. **Mixed Types**
109 | ``data[0]["user name"].info.age``
110 | Accesses ``age`` within ``info``, nested under ``"user name"`` in the first item of ``data`` (see the sketch below).
111 |
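As a quick aside, the mixed path from example 4 can be attached directly to a
dataclass field with :func:`KeyPath`, exactly as in the *Basic Usage Example*
above (a short sketch only; the class and field names here are illustrative):

.. code:: python3

    from dataclasses import dataclass
    from typing import Annotated

    from dataclass_wizard import JSONWizard, KeyPath


    @dataclass
    class Record(JSONWizard):
        # Maps to data[0]["user name"].info.age in the input JSON
        age: Annotated[int, KeyPath('data[0]["user name"].info.age')]
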
112 | Path Parsing Examples
113 | ---------------------
114 |
115 | These examples illustrate how the path is interpreted by ``KeyPath`` or ``path_field``:
116 |
117 | - **Example 1: Boolean Path**
118 |
119 | .. code:: python3
120 |
121 | split_object_path('user[true]')
122 |
123 | Output: ``['user', True]``
124 | Accesses the ``True`` key in the ``user`` object. Booleans like ``True`` and ``False`` are automatically recognized.
125 |
126 | - **Example 2: Integer Path**
127 |
128 | .. code:: python3
129 |
130 | split_object_path('data[5].value')
131 |
132 | Output: ``['data', 5, 'value']``
133 | Accesses ``value`` in the 6th element (index 5) of the ``data`` array.
134 |
135 | - **Example 3: Floats in Paths**
136 |
137 | .. code:: python3
138 |
139 | split_object_path('data[0.25]')
140 |
141 | Output: ``['data', 0.25]``
142 | Floats are parsed correctly, although array indices are typically integers.
143 |
144 | - **Example 4: Strings Without Quotes**
145 |
146 | .. code:: python3
147 |
148 | split_object_path('data[user_name]')
149 |
150 | Output: ``['data', 'user_name']``
151 | Valid identifiers are treated as strings even without quotes.
152 |
153 | - **Example 5: Strings With Quotes**
154 |
155 | .. code:: python3
156 |
157 | split_object_path('data["user name"]')
158 |
159 | Output: ``['data', 'user name']``
160 | Quotes are required for keys with spaces or special characters.
161 |
162 | - **Example 6: Mixed Types**
163 |
164 | .. code:: python3
165 |
166 | split_object_path('data[0]["user name"].info[age]')
167 |
168 | Output: ``['data', 0, 'user name', 'info', 'age']``
169 | Accesses ``age`` within ``info``, under ``user name``, in the first item of ``data``.
170 |
171 | Handling Quotes
172 | ---------------
173 |
174 | When keys or indices are wrapped in quotes, they are interpreted as strings. This is necessary for:
175 |
176 | - Keys with spaces or special characters.
177 | - Reserved words or identifiers that could otherwise cause parsing errors.
178 |
179 | Example:
180 |
181 | .. code:: python3
182 |
183 | split_object_path('data["123"].info')
184 |
185 | Output: ``['data', '123', 'info']``
186 | Here, ``"123"`` is treated as a string because of the quotes.
187 |
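To sanity-check how a given path will be split, you can also call the parser
directly (a minimal sketch; ``split_object_path`` is an internal helper that
lives in ``dataclass_wizard.utils.object_path`` in this repository, so its
import location is not part of the public API):

.. code:: python3

    from dataclass_wizard.utils.object_path import split_object_path

    # Quotes keep "123" as a string, and backslash-escaped quotes are preserved.
    print(split_object_path('data["123"].info'))       # ['data', '123', 'info']
    print(split_object_path(r'data["my \"key\""].x'))  # ['data', 'my "key"', 'x']
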
188 | Best Practices
189 | --------------
190 |
191 | - Use :type:`Annotated` with :func:`KeyPath` for complex, deeply nested paths.
192 | - Use :func:`path_field` for flexibility, defaults, or custom serialization.
193 | - Keep paths concise and use quotes judiciously for clarity and correctness.
194 |
--------------------------------------------------------------------------------
/docs/examples.rst:
--------------------------------------------------------------------------------
1 | Examples
2 | ========
3 |
4 | Simple
5 | ~~~~~~
6 |
7 | The following example has been tested on **Python 3.9+**.
8 |
9 | .. code:: python3
10 |
11 | # Note: in Python 3.10+, this import can be removed
12 | from __future__ import annotations
13 |
14 | from dataclasses import dataclass, field
15 |
16 | from dataclass_wizard import JSONWizard
17 |
18 |
19 | @dataclass
20 | class MyClass(JSONWizard):
21 | my_str: str | None
22 | is_active_tuple: tuple[bool, ...]
23 | list_of_int: list[int] = field(default_factory=list)
24 |
25 |
26 | string = """
27 | {
28 | "my_str": 20,
29 | "ListOfInt": ["1", "2", 3],
30 | "isActiveTuple": ["true", "false", 1, false]
31 | }
32 | """
33 |
34 | # De-serialize the JSON string into a `MyClass` object.
35 | c = MyClass.from_json(string)
36 |
37 | print(repr(c))
38 | # prints:
39 | # MyClass(my_str='20', is_active_tuple=(True, False, True, False), list_of_int=[1, 2, 3])
40 |
41 | print(c.to_json())
42 | # prints:
43 | # {"myStr": "20", "isActiveTuple": [true, false, true, false], "listOfInt": [1, 2, 3]}
44 |
45 | # True
46 | assert c == c.from_dict(c.to_dict())
47 |
48 | Using Typing Imports (Deprecated)
49 | ---------------------------------
50 |
51 | This approach is supported in **Python 3.6+**. Usage is the same as above; a brief sketch follows the class definition below.
52 |
53 | .. code:: python3
54 |
55 | from dataclasses import dataclass, field
56 | from typing import Optional, List, Tuple
57 |
58 | from dataclass_wizard import JSONWizard
59 |
60 |
61 | @dataclass
62 | class MyClass(JSONWizard):
63 | my_str: Optional[str]
64 | is_active_tuple: Tuple[bool, ...]
65 | list_of_int: List[int] = field(default_factory=list)
66 |
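For completeness, here is a brief sketch continuing from the ``MyClass``
definition above (the sample ``data`` and the printed output are illustrative):

.. code:: python3

    data = {'my_str': None, 'is_active_tuple': ['true', 0], 'list_of_int': ['1', 2]}

    c = MyClass.from_dict(data)

    print(repr(c))
    # expected (roughly):
    # MyClass(my_str=None, is_active_tuple=(True, False), list_of_int=[1, 2])

    assert c == MyClass.from_dict(c.to_dict())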
67 |
68 | A (More) Complete Example
69 | ~~~~~~~~~~~~~~~~~~~~~~~~~
70 |
71 | .. code:: python3
72 |
73 | from collections import defaultdict
74 | from dataclasses import dataclass, field
75 | from datetime import datetime
76 | from typing import Optional, Literal, Union, Any, NamedTuple
77 |
78 | from dataclass_wizard import JSONSerializable
79 |
80 |
81 | @dataclass
82 | class MyTestClass(JSONSerializable):
83 | my_ledger: dict[str, Any]
84 | the_answer_to_life: Optional[int]
85 | people: list['Person']
86 | is_enabled: bool = True
87 |
88 |
89 | @dataclass
90 | class Person:
91 | name: 'Name'
92 | age: int
93 | birthdate: datetime
94 | gender: Literal['M', 'F', 'N/A']
95 | occupation: Union[str, list[str]]
96 | hobbies: defaultdict[str, list[str]] = field(
97 | default_factory=lambda: defaultdict(list))
98 |
99 |
100 | class Name(NamedTuple):
101 | """A person's name"""
102 | first: str
103 | last: str
104 | salutation: Optional[Literal['Mr.', 'Mrs.', 'Ms.', 'Dr.']] = 'Mr.'
105 |
106 |
107 | data = {
108 | 'myLedger': {
109 | 'Day 1': 'some details',
110 | 'Day 17': ['a', 'sample', 'list']
111 | },
112 | 'theAnswerTOLife': '42',
113 | 'People': [
114 | {
115 | 'name': ('Roberto', 'Fuirron'),
116 | 'age': 21,
117 | 'birthdate': '1950-02-28T17:35:20Z',
118 | 'gender': 'M',
119 | 'occupation': ['sailor', 'fisher'],
120 | 'Hobbies': {'M-F': ('chess', 123, 'reading'), 'Sat-Sun': ['parasailing']}
121 | },
122 | {
123 | 'name': ('Janice', 'Darr', 'Dr.'),
124 | 'age': 45,
125 | 'birthdate': '1971-11-05 05:10:59',
126 | 'gender': 'F',
127 | 'occupation': 'Dentist'
128 | }
129 | ]
130 | }
131 |
132 | c = MyTestClass.from_dict(data)
133 |
134 | print(repr(c))
135 | # prints the following result on a single line:
136 | # MyTestClass(
137 | # my_ledger={'Day 1': 'some details', 'Day 17': ['a', 'sample', 'list']},
138 | # the_answer_to_life=42,
139 | # people=[
140 | # Person(
141 | # name=Name(first='Roberto', last='Fuirron', salutation='Mr.'),
142 | # age=21, birthdate=datetime.datetime(1950, 2, 28, 17, 35, 20, tzinfo=datetime.timezone.utc),
143 | # gender='M', occupation=['sailor', 'fisher'],
144 | # hobbies=defaultdict(<class 'list'>, {'M-F': ['chess', '123', 'reading'], 'Sat-Sun': ['parasailing']})
145 | # ),
146 | # Person(
147 | # name=Name(first='Janice', last='Darr', salutation='Dr.'),
148 | # age=45, birthdate=datetime.datetime(1971, 11, 5, 5, 10, 59),
149 | # gender='F', occupation='Dentist',
150 | # hobbies=defaultdict(<class 'list'>, {})
151 | # )
152 | # ], is_enabled=True)
153 |
154 | # calling `print` on the object invokes the `__str__` method, which will
155 | # pretty-print the JSON representation of the object by default. You can
156 | # also call the `to_json` method to print the JSON string on a single line.
157 |
158 | print(c)
159 | # prints:
160 | # {
161 | # "myLedger": {
162 | # "Day 1": "some details",
163 | # "Day 17": [
164 | # "a",
165 | # "sample",
166 | # "list"
167 | # ]
168 | # },
169 | # "theAnswerToLife": 42,
170 | # "people": [
171 | # {
172 | # "name": [
173 | # "Roberto",
174 | # "Fuirron",
175 | # "Mr."
176 | # ],
177 | # "age": 21,
178 | # "birthdate": "1950-02-28T17:35:20Z",
179 | # "gender": "M",
180 | # "occupation": [
181 | # "sailor",
182 | # "fisher"
183 | # ],
184 | # "hobbies": {
185 | # "M-F": [
186 | # "chess",
187 | # "123",
188 | # "reading"
189 | # ],
190 | # "Sat-Sun": [
191 | # "parasailing"
192 | # ]
193 | # }
194 | # },
195 | # {
196 | # "name": [
197 | # "Janice",
198 | # "Darr",
199 | # "Dr."
200 | # ],
201 | # "age": 45,
202 | # "birthdate": "1971-11-05T05:10:59",
203 | # "gender": "F",
204 | # "occupation": "Dentist",
205 | # "hobbies": {}
206 | # }
207 | # ],
208 | # "isEnabled": true
209 | # }
210 |
--------------------------------------------------------------------------------
/dataclass_wizard/utils/object_path.py:
--------------------------------------------------------------------------------
1 | from dataclasses import MISSING
2 |
3 | from ..errors import ParseError
4 |
5 |
6 | def safe_get(data, path, default=MISSING, raise_=True):
7 | current_data = data
8 | p = path # to avoid "unbound local variable" warnings
9 |
10 | try:
11 | for p in path:
12 | current_data = current_data[p]
13 |
14 | return current_data
15 |
16 | # IndexError -
17 | # raised when `data` is a `list`, and we access an index that is "out of bounds"
18 | # KeyError -
19 | # raised when `data` is a `dict`, and we access a key that is not present
20 | # AttributeError -
21 | # raised when `data` is an invalid type, such as a `None`
22 | except (IndexError, KeyError, AttributeError) as e:
23 | if raise_ and default is MISSING:
24 | raise _format_err(e, current_data, path, p) from None
25 | return default
26 |
27 | # TypeError -
28 | # raised when `data` is a `list`, but we try to use it like a `dict`
29 | except TypeError:
30 | e = TypeError('Invalid path')
31 | raise _format_err(e, current_data, path, p, True) from None
32 |
33 |
34 | def v1_safe_get(data, path, raise_):
35 | current_data = data
36 |
37 | try:
38 | for p in path:
39 | current_data = current_data[p]
40 |
41 | return current_data
42 |
43 | # IndexError -
44 | # raised when `data` is a `list`, and we access an index that is "out of bounds"
45 | # KeyError -
46 | # raised when `data` is a `dict`, and we access a key that is not present
47 | # AttributeError -
48 | # raised when `data` is an invalid type, such as a `None`
49 | except (IndexError, KeyError, AttributeError) as e:
50 | if raise_:
51 | p = locals().get('p', path) # to suppress "unbound local variable"
52 | raise _format_err(e, current_data, path, p, True) from None
53 |
54 | return MISSING
55 |
56 | # TypeError -
57 | # raised when `data` is a `list`, but we try to use it like a `dict`
58 | except TypeError:
59 | e = TypeError('Invalid path')
60 | p = locals().get('p', path) # to suppress "unbound local variable"
61 | raise _format_err(e, current_data, path, p, True) from None
62 |
63 |
64 | def _format_err(e, current_data, path, current_path, invalid_path=False):
65 | return ParseError(
66 | e, current_data, dict if invalid_path else None, 'load',
67 | path=' => '.join(repr(p) for p in path),
68 | current_path=repr(current_path),
69 | )
70 |
71 |
72 | # What values are considered "truthy" when converting to a boolean type.
73 | # noinspection SpellCheckingInspection
74 | _TRUTHY_VALUES = frozenset(("True", "true"))
75 |
76 | # What values are considered "falsy" when converting to a boolean type.
77 | # noinspection SpellCheckingInspection
78 | _FALSY_VALUES = frozenset(("False", "false"))
79 |
80 |
81 | # Valid starting separators in our custom "object path",
82 | # for example `a.b[c].d.[-1]` has 5 start separators.
83 | _START_SEP = frozenset(('.', '['))
84 |
85 |
86 | def split_object_path(_input):
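    """
    Split a custom "object path" string into its individual components.

    Segments that look like numbers are converted to ``int`` or ``float``,
    the literals ``True``/``true`` and ``False``/``false`` become booleans,
    and quoted segments (with optional backslash escapes) are always kept
    as strings. For example::

        split_object_path('a[0]["k"].b')   # -> ['a', 0, 'k', 'b']
    """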
87 | res = []
88 | s = ""
89 | start_new = True
90 | in_literal = False
91 |
92 | parsed_string_literal = False
93 |
94 | in_braces = False
95 |
96 | escape_next_quote = False
97 | quote_char = None
98 | possible_number = False
99 |
100 | for c in _input:
101 | if c in _START_SEP:
102 | if in_literal:
103 | s += c
104 | else:
105 | if c == '.':
106 | # A period within braces [xxx] OR within a string "xxx",
107 | # should be captured.
108 | if in_braces:
109 | s += c
110 | continue
111 | in_braces = False
112 | else:
113 | in_braces = True
114 |
115 | start_new = True
116 | if s:
117 | if possible_number:
118 | possible_number = False
119 | try:
120 | num = int(s)
121 | res.append(num)
122 | except ValueError:
123 | try:
124 | num = float(s)
125 | res.append(num)
126 | except ValueError:
127 | res.append(s)
128 | elif parsed_string_literal:
129 | parsed_string_literal = False
130 | res.append(s)
131 | else:
132 | if s in _TRUTHY_VALUES:
133 | res.append(True)
134 | elif s in _FALSY_VALUES:
135 | res.append(False)
136 | else:
137 | res.append(s)
138 |
139 | s = ""
140 | elif c == '\\' and in_literal:
141 | escape_next_quote = True
142 | elif escape_next_quote:
143 | if c != quote_char:
144 | # It was not an escape character after all!
145 | s += '\\'
146 | # Capture escaped character
147 | s += c
148 | escape_next_quote = False
149 | elif c == quote_char:
150 | in_literal = False
151 | quote_char = None
152 | parsed_string_literal = True
153 | elif c in {'"', "'"} and start_new:
154 | start_new = False
155 | in_literal = True
156 | quote_char = c
157 | elif (c in {'+', '-'} or c.isdigit()) and start_new:
158 | start_new = False
159 | possible_number = True
160 | s += c
161 | elif start_new:
162 | start_new = False
163 | s += c
164 | elif c == ']':
165 | if in_literal:
166 | s += c
167 | else:
168 | in_braces = False
169 | else:
170 | s += c
171 |
172 | if s:
173 | if possible_number:
174 | try:
175 | num = int(s)
176 | res.append(num)
177 | except ValueError:
178 | try:
179 | num = float(s)
180 | res.append(num)
181 | except ValueError:
182 | res.append(s)
183 | elif parsed_string_literal:
184 | res.append(s)
185 | else:
186 | if s in _TRUTHY_VALUES:
187 | res.append(True)
188 | elif s in _FALSY_VALUES:
189 | res.append(False)
190 | else:
191 | res.append(s)
192 |
193 | return res
194 |
--------------------------------------------------------------------------------
/dataclass_wizard/type_def.py:
--------------------------------------------------------------------------------
1 | __all__ = [
2 | 'Buffer',
3 | 'PyForwardRef',
4 | 'PyProtocol',
5 | 'PyDeque',
6 | 'PyTypedDict',
7 | 'PyRequired',
8 | 'PyNotRequired',
9 | 'PyReadOnly',
10 | 'PyLiteralString',
11 | 'FrozenKeys',
12 | 'DefFactory',
13 | 'NoneType',
14 | 'ExplicitNullType',
15 | 'ExplicitNull',
16 | 'JSONList',
17 | 'JSONObject',
18 | 'ListOfJSONObject',
19 | 'JSONValue',
20 | 'FileType',
21 | 'EnvFileType',
22 | 'StrCollection',
23 | 'ParseFloat',
24 | 'Encoder',
25 | 'FileEncoder',
26 | 'Decoder',
27 | 'FileDecoder',
28 | 'NUMBERS',
29 | 'T',
30 | 'E',
31 | 'U',
32 | 'M',
33 | 'NT',
34 | 'DT',
35 | 'DD',
36 | 'N',
37 | 'S',
38 | 'LT',
39 | 'LSQ',
40 | 'FREF',
41 | 'dataclass_transform',
42 | ]
43 |
44 | from collections import deque, defaultdict
45 | from datetime import date, time, datetime
46 | from enum import Enum
47 | from os import PathLike
48 | from typing import (
49 | Any, TypeVar, Sequence, Mapping,
50 | Union, NamedTuple, Callable, AnyStr, TextIO, BinaryIO,
51 | Deque as PyDeque,
52 | ForwardRef as PyForwardRef,
53 | Protocol as PyProtocol,
54 | TypedDict as PyTypedDict, Iterable, Collection,
55 | )
56 | from uuid import UUID
57 |
58 | from .constants import PY310_OR_ABOVE, PY311_OR_ABOVE, PY313_OR_ABOVE, PY312_OR_ABOVE
59 |
60 | # The class of the `None` singleton, cached for re-usability
61 | if PY310_OR_ABOVE:
62 | # https://docs.python.org/3/library/types.html#types.NoneType
63 | from types import NoneType
64 | else:
65 | # "Cannot assign to a type"
66 | NoneType = type(None) # type: ignore[misc]
67 |
68 | # Type check for numeric types - needed because `bool` is technically
69 | # a Number.
70 | NUMBERS = int, float
71 |
72 | # Generic type
73 | T = TypeVar('T')
74 | TT = TypeVar('TT')
75 |
76 | # Enum subclass type
77 | E = TypeVar('E', bound=Enum)
78 |
79 | # UUID subclass type
80 | U = TypeVar('U', bound=UUID)
81 |
82 | # Mapping type
83 | M = TypeVar('M', bound=Mapping)
84 |
85 | # NamedTuple type
86 | NT = TypeVar('NT', bound=NamedTuple)
87 |
88 | # Date, time, or datetime type
89 | DT = TypeVar('DT', date, time, datetime)
90 |
91 | # DefaultDict type
92 | DD = TypeVar('DD', bound=defaultdict)
93 |
94 | # Numeric type
95 | N = Union[int, float]
96 |
97 | # Sequence type
98 | S = TypeVar('S', bound=Sequence)
99 |
100 | # List or Tuple type
101 | LT = TypeVar('LT', list, tuple)
102 |
103 | # List, Set, or Deque (Double ended queue) type
104 | LSQ = TypeVar('LSQ', list, set, frozenset, deque)
105 |
106 | # A fixed set of key names
107 | FrozenKeys = frozenset[str]
108 |
109 | # Default factory type, assuming a no-args constructor
110 | DefFactory = Callable[[], T]
111 |
112 | # Valid collection types in JSON.
113 | JSONList = list[Any]
114 | JSONObject = dict[str, Any]
115 | ListOfJSONObject = list[JSONObject]
116 |
117 | # Valid value types in JSON.
118 | JSONValue = Union[None, str, bool, int, float, JSONList, JSONObject]
119 |
120 | # File-type argument, compatible with the type of `file` for `open`
121 | FileType = Union[str, bytes, PathLike, int]
122 |
123 | # DotEnv file-type argument (string, tuple of string, boolean, or None)
124 | EnvFileType = Union[bool, FileType, Iterable[FileType], None]
125 |
126 | # Type for a string or a collection of strings.
127 | StrCollection = Union[str, Collection[str]]
128 |
129 | # Python 3.11 introduced `Required` and `NotRequired` wrappers for
130 | # `TypedDict` fields (PEP 655). Python 3.9+ users can import the
131 | # wrappers from `typing_extensions`.
132 |
133 | if PY313_OR_ABOVE: # pragma: no cover
134 | from collections.abc import Buffer
135 |
136 | from typing import (Required as PyRequired,
137 | NotRequired as PyNotRequired,
138 | ReadOnly as PyReadOnly,
139 | LiteralString as PyLiteralString,
140 | dataclass_transform)
141 | elif PY311_OR_ABOVE: # pragma: no cover
142 | if PY312_OR_ABOVE:
143 | from collections.abc import Buffer
144 | else:
145 | from typing_extensions import Buffer
146 |
147 | from typing import (Required as PyRequired,
148 | NotRequired as PyNotRequired,
149 | LiteralString as PyLiteralString,
150 | dataclass_transform)
151 | from typing_extensions import ReadOnly as PyReadOnly
152 | else:
153 | from typing_extensions import (Buffer,
154 | Required as PyRequired,
155 | NotRequired as PyNotRequired,
156 | ReadOnly as PyReadOnly,
157 | LiteralString as PyLiteralString,
158 | dataclass_transform)
159 |
160 | # Forward references can be either strings or explicit `ForwardRef` objects.
161 | # noinspection SpellCheckingInspection
162 | FREF = TypeVar('FREF', str, PyForwardRef)
163 |
164 |
165 | class ExplicitNullType:
166 | __slots__ = () # Saves memory by preventing the creation of instance dictionaries
167 |
168 | # Class-level instance variable for singleton control
169 | _instance: "ExplicitNullType | None" = None
170 |
171 | def __new__(cls):
172 | if cls._instance is None:
173 | cls._instance = super(ExplicitNullType, cls).__new__(cls)
174 | return cls._instance
175 |
176 | def __bool__(self):
177 | return False
178 |
179 | def __repr__(self):
180 | return '<ExplicitNull>'
181 |
182 |
183 | # Create the singleton instance
184 | ExplicitNull = ExplicitNullType()
185 |
186 | # Type annotations
187 | ParseFloat = Callable[[str], Any]
188 |
189 |
190 | class Encoder(PyProtocol):
191 | """
192 | Represents an encoder for Python object -> JSON, e.g. analogous to
193 | `json.dumps`
194 | """
195 |
196 | def __call__(self, obj: Union[JSONObject, JSONList],
197 | /,
198 | *args,
199 | **kwargs) -> AnyStr:
200 | ...
201 |
202 |
203 | class FileEncoder(PyProtocol):
204 | """
205 | Represents an encoder for Python object -> JSON file, e.g. analogous to
206 | `json.dump`
207 | """
208 |
209 | def __call__(self, obj: Union[JSONObject, JSONList],
210 | file: Union[TextIO, BinaryIO],
211 | **kwargs) -> AnyStr:
212 | ...
213 |
214 |
215 | class Decoder(PyProtocol):
216 | """
217 | Represents a decoder for JSON -> Python object, e.g. analogous to
218 | `json.loads`
219 | """
220 |
221 | def __call__(self, s: AnyStr,
222 | **kwargs) -> Union[JSONObject, ListOfJSONObject]:
223 | ...
224 |
225 |
226 | class FileDecoder(PyProtocol):
227 | """
228 | Represents a decoder for JSON file -> Python object, e.g. analogous to
229 | `json.load`
230 | """
231 | def __call__(self, file: Union[TextIO, BinaryIO],
232 | **kwargs) -> Union[JSONObject, ListOfJSONObject]:
233 | ...
234 |
--------------------------------------------------------------------------------
/dataclass_wizard/serial_json.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | from dataclasses import is_dataclass, dataclass
4 |
5 | from .abstractions import AbstractJSONWizard
6 | from .bases_meta import BaseJSONWizardMeta, LoadMeta, DumpMeta, register_type
7 | from .constants import PACKAGE_NAME, SINGLE_ARG_ALIAS
8 | from .class_helper import call_meta_initializer_if_needed, get_meta
9 | from .decorators import _single_arg_alias
10 | from .type_def import dataclass_transform
11 | from .loader_selection import asdict, fromdict, fromlist, get_loader, get_dumper
12 | # noinspection PyProtectedMember
13 | from .utils.dataclass_compat import _create_fn, _set_new_attribute
14 | from .type_def import dataclass_transform
15 |
16 |
17 | def _str_fn():
18 | return _create_fn('__str__',
19 | ('self',),
20 | ['return self.to_json(indent=2)'])
21 |
22 |
23 | def _configure_wizard_class(cls,
24 | str=True,
25 | debug=False,
26 | case=None,
27 | dump_case=None,
28 | load_case=None,
29 | _key_transform=None,
30 | _v1_default=False):
31 | load_meta_kwargs = {}
32 |
33 | if case is not None:
34 | _v1_default = True
35 | load_meta_kwargs['v1_case'] = case
36 |
37 | if dump_case is not None:
38 | _v1_default = True
39 | load_meta_kwargs['v1_dump_case'] = dump_case
40 |
41 | if load_case is not None:
42 | _v1_default = True
43 | load_meta_kwargs['v1_load_case'] = load_case
44 |
45 | if _v1_default:
46 | load_meta_kwargs['v1'] = True
47 |
48 | if _key_transform is not None:
49 | DumpMeta(key_transform=_key_transform).bind_to(cls)
50 |
51 | if debug:
52 | default_lvl = logging.DEBUG
53 | logging.basicConfig(level=default_lvl)
54 | # minimum logging level for logs by this library
55 | min_level = default_lvl if isinstance(debug, bool) else debug
56 | # set `v1_debug` flag for the class's Meta
57 | load_meta_kwargs['v1_debug'] = min_level
58 |
59 | # Calls the Meta initializer when inner :class:`Meta` is sub-classed.
60 | call_meta_initializer_if_needed(cls)
61 |
62 | if load_meta_kwargs:
63 | LoadMeta(**load_meta_kwargs).bind_to(cls)
64 |
65 | # Add a `__str__` method to the subclass, if needed
66 | if str:
67 | _set_new_attribute(cls, '__str__', _str_fn())
68 |
69 |
70 | @dataclass_transform()
71 | class DataclassWizard(AbstractJSONWizard):
72 |
73 | __slots__ = ()
74 |
75 | class Meta(BaseJSONWizardMeta):
76 |
77 | __slots__ = ()
78 |
79 | __is_inner_meta__ = True
80 |
81 | def __init_subclass__(cls):
82 | return cls._init_subclass()
83 |
84 | register_type = classmethod(register_type)
85 |
86 | @classmethod
87 | def from_json(cls, string, *,
88 | decoder=json.loads,
89 | **decoder_kwargs):
90 |
91 | o = decoder(string, **decoder_kwargs)
92 |
93 | return fromdict(cls, o) if isinstance(o, dict) else fromlist(cls, o)
94 |
95 | from_list = classmethod(fromlist)
96 |
97 | from_dict = classmethod(fromdict)
98 |
99 | to_dict = asdict
100 |
101 | def to_json(self, *,
102 | encoder=json.dumps,
103 | **encoder_kwargs):
104 |
105 | return encoder(asdict(self), **encoder_kwargs)
106 |
107 | @classmethod
108 | def list_to_json(cls,
109 | instances,
110 | encoder=json.dumps,
111 | **encoder_kwargs):
112 |
113 | list_of_dict = [asdict(o, cls=cls) for o in instances]
114 |
115 | return encoder(list_of_dict, **encoder_kwargs)
116 |
117 | # noinspection PyShadowingBuiltins
118 | def __init_subclass__(cls,
119 | str=False,
120 | debug=False,
121 | case=None,
122 | dump_case=None,
123 | load_case=None,
124 | _key_transform=None,
125 | _v1_default=True,
126 | _apply_dataclass=True,
127 | **dc_kwargs):
128 |
129 | super().__init_subclass__()
130 |
131 | # skip classes provided by this library
132 | if cls.__module__.startswith(f'{PACKAGE_NAME}.'):
133 | return
134 |
135 | # Apply the @dataclass decorator.
136 | if _apply_dataclass and not is_dataclass(cls):
137 | # noinspection PyArgumentList
138 | dataclass(cls, **dc_kwargs)
139 |
140 | _configure_wizard_class(cls, str, debug, case, dump_case, load_case,
141 | _key_transform, _v1_default)
142 |
143 |
144 | # noinspection PyAbstractClass
145 | @dataclass_transform()
146 | class JSONSerializable(DataclassWizard):
147 |
148 | __slots__ = ()
149 |
150 | # noinspection PyShadowingBuiltins
151 | def __init_subclass__(cls,
152 | str=True,
153 | debug=False,
154 | case=None,
155 | dump_case=None,
156 | load_case=None,
157 | _key_transform=None,
158 | _v1_default=False,
159 | _apply_dataclass=False,
160 | **_):
161 |
162 | super().__init_subclass__(str, debug, case, dump_case, load_case,
163 | _key_transform, _v1_default, _apply_dataclass)
164 |
165 |
166 | def _str_pprint_fn():
167 | from pprint import pformat
168 |
169 | def __str__(self):
170 | return pformat(self, width=70)
171 |
172 | return __str__
173 |
174 |
175 | # A handy alias in case it comes in useful to anyone :)
176 | JSONWizard = JSONSerializable
177 |
178 |
179 | class JSONPyWizard(JSONWizard):
180 | """Helper for JSONWizard that ensures dumping to JSON keeps keys as-is."""
181 |
182 | # noinspection PyShadowingBuiltins
183 | def __init_subclass__(cls,
184 | str=True,
185 | debug=False,
186 | case=None,
187 | dump_case=None,
188 | load_case=None,
189 | _key_transform=None,
190 | _v1_default=False,
191 | _apply_dataclass=False,
192 | **_):
193 | """Bind child class to DumpMeta with no key transformation."""
194 |
195 | # Call JSONSerializable.__init_subclass__()
196 | # set `key_transform_with_dump` for the class's Meta
197 | super().__init_subclass__(False, debug, case, dump_case, load_case, 'NONE',
198 | _v1_default, _apply_dataclass)
199 |
200 | # Add a `__str__` method to the subclass, if needed
201 | if str:
202 | _set_new_attribute(cls, '__str__', _str_pprint_fn())
203 |
--------------------------------------------------------------------------------