├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yml ├── .travis.yml ├── Makefile ├── README.rst ├── docs ├── README.md ├── conf.py ├── examples │ ├── attr_docs_example.py │ ├── env_var_exact_loc.py │ ├── load_env_prefix.py │ ├── loc_inside_text_content.py │ └── override_by_env.py ├── index.rst ├── requirements.txt └── rst_builder_ext.py ├── poetry.lock ├── pydantic_settings ├── __init__.py ├── attrs_docs.py ├── base.py ├── decoder │ ├── __init__.py │ ├── common.py │ ├── json.py │ └── yaml.py ├── errors.py ├── load.py ├── restorer.py ├── types.py └── utils.py ├── pyproject.toml └── test ├── __init__.py ├── conftest.py ├── test_deep_map_merger.py ├── test_errs_rendering.py ├── test_fields_docs.py ├── test_json_ast_decoder.py ├── test_load.py ├── test_restorer.py └── test_settings_base.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | 53 | # Translations 54 | *.mo 55 | *.pot 56 | 57 | # Django stuff: 58 | *.log 59 | local_settings.py 60 | db.sqlite3 61 | db.sqlite3-journal 62 | 63 | # Flask stuff: 64 | instance/ 65 | .webassets-cache 66 | 67 | # Scrapy stuff: 68 | .scrapy 69 | 70 | # Sphinx documentation 71 | docs/_build/ 72 | 73 | # PyBuilder 74 | target/ 75 | 76 | # Jupyter Notebook 77 | .ipynb_checkpoints 78 | 79 | # IPython 80 | profile_default/ 81 | ipython_config.py 82 | 83 | # pyenv 84 | .python-version 85 | 86 | # pipenv 87 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 88 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 89 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 90 | # install all needed dependencies. 
91 | #Pipfile.lock 92 | 93 | # celery beat schedule file 94 | celerybeat-schedule 95 | 96 | # SageMath parsed files 97 | *.sage.py 98 | 99 | # Environments 100 | .env 101 | .venv 102 | env/ 103 | venv/ 104 | ENV/ 105 | env.bak/ 106 | venv.bak/ 107 | 108 | # Spyder project settings 109 | .spyderproject 110 | .spyproject 111 | 112 | # Rope project settings 113 | .ropeproject 114 | 115 | # mkdocs documentation 116 | /site 117 | 118 | # mypy 119 | .mypy_cache/ 120 | .dmypy.json 121 | dmypy.json 122 | 123 | # Pyre type checker 124 | .pyre/ 125 | 126 | # IDE's 127 | .vscode 128 | .idea 129 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/ambv/black 3 | rev: stable 4 | hooks: 5 | - id: black 6 | language_version: python3.7 7 | 8 | exclude: docs/examples/* 9 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file 2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 3 | 4 | # Required 5 | version: 2 6 | 7 | # Build documentation in the docs/ directory with Sphinx 8 | sphinx: 9 | configuration: docs/conf.py 10 | 11 | # Build documentation with MkDocs 12 | #mkdocs: 13 | # configuration: mkdocs.yml 14 | 15 | # Optionally build your docs in additional formats such as PDF and ePub 16 | formats: all 17 | 18 | # Optionally set the version of Python and requirements required to build your docs 19 | python: 20 | version: 3.7 21 | install: 22 | - requirements: docs/requirements.txt 23 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | cache: 3 | directories: 4 | - 
.venv 5 | 6 | python: 7 | - "3.6" 8 | - "3.7" 9 | - "3.8" 10 | - "3.9" 11 | install: 12 | - pip install poetry 13 | - poetry config virtualenvs.in-project true 14 | - ls .venv/bin/python &> /dev/null || poetry install 15 | 16 | script: 17 | - make lint-readme 18 | - make lint 19 | - make test 20 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | project_name = $(shell grep -Po '(?<=name = )"([\w-]+)"' pyproject.toml | tr -d '"') 2 | poetry_cmd=poetry 3 | CMD_PREFIX ?= ${poetry_cmd} run 4 | build_artifact_name=dist/build.whl 5 | lint_targets = $(shell echo $(project_name) | tr '-' '_') test 6 | WHEEL_NAME_FILE ?= dist/wheel_name.txt 7 | 8 | .PHONY: test docs 9 | 10 | ## Install dependencies 11 | install-deps: 12 | @${poetry_cmd} install 13 | @echo Development dependencies installed 14 | 15 | ## Check code-style 16 | lint: 17 | @${CMD_PREFIX} flake8 ${lint_targets} 18 | @${CMD_PREFIX} isort --check ${lint_targets} 19 | @${CMD_PREFIX} black --check ${lint_targets} 20 | 21 | lint-readme: 22 | @{ \ 23 | ${CMD_PREFIX} sphinx-build -M rst docs docs/build > /dev/null 2>&1; \ 24 | if ! diff docs/build/rst/README.rst README.rst; then \ 25 | echo 'README.md is not up to date, invoke "make readme"'; exit 1; \ 26 | fi; \ 27 | } 28 | 29 | ## Coerce code-style 30 | fmt: 31 | @${CMD_PREFIX} isort ${lint_targets} 32 | @${CMD_PREFIX} black ${lint_targets} 33 | 34 | ## Run unit-tests 35 | test: 36 | @${CMD_PREFIX} pytest test 37 | 38 | ## Build project into wheel, place it under "dist" folder (may be altered via $DIST_DST). 
39 | ## Wheel filename can be read from "dist/wheel_name.txt" 40 | build: 41 | @{ \ 42 | set -e; \ 43 | tmp_file=$$(mktemp); \ 44 | ${poetry_cmd} build -f wheel | tee $$tmp_file; \ 45 | wheel_name=$$(cat $$tmp_file | grep -P -o '$(project_name)[-\d\w.]+'); \ 46 | echo Wheel "$$wheel_name" succesfully built; \ 47 | echo Writing wheel name into ${WHEEL_NAME_FILE}; \ 48 | echo $$(pwd)/dist/$$wheel_name > ${WHEEL_NAME_FILE}; \ 49 | } 50 | 51 | ## Build project docs 52 | docs: 53 | @${CMD_PREFIX} sphinx-build docs docs/build 54 | 55 | ## Generate README 56 | readme: 57 | @${CMD_PREFIX} sphinx-build -M rst docs docs/build 58 | @cp docs/build/rst/README.rst . 59 | 60 | @echo 61 | @echo Generated README.rst has been copied to the project root 62 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. 2 | AUTOGENERATED DO NOT MODIFY 3 | 4 | Pydantic settings 5 | ***************** 6 | 7 | .. 8 | 9 | Hipster-orgazmic tool to manage application settings 10 | 11 | .. image:: https://travis-ci.com/danields761/pydantic-settings.svg?branch=master 12 | :target: https://travis-ci.com/danields761/pydantic-settings 13 | .. image:: https://badge.fury.io/py/pydantic-settings.svg 14 | :target: https://badge.fury.io/py/pydantic-settings 15 | .. image:: https://readthedocs.org/projects/pydantic-settings/badge/?version=latest 16 | :target: https://pydantic-settings.readthedocs.io/en/latest/?badge=latest 17 | 18 | Library which extends `pydantic `_ functionality in scope of application settings. *Pydantic* already has settings 19 | implementation, e.g. ``pydantic.BaseSettings``, but from my point of view it’s missing some useful features: 20 | 21 | 1. Overriding settings values by environment variables even for nested fields 22 | 23 | 2. 
Providing detailed information about value location inside a loaded file or environment variable, which helps to point out the user’s mistake 24 | 25 | 3. Documenting model fields doesn’t feel comfortable, but it’s really essential to write comprehensive documentation for application settings 26 | 27 | .. 28 | 29 | **NOTE:** Beta quality 30 | 31 | 32 | Installation 33 | ============ 34 | 35 | Using pip: 36 | 37 | .. code-block:: sh 38 | 39 | pip install pydantic-settings 40 | 41 | 42 | Usage example 43 | ============= 44 | 45 | 46 | Override values by env variables 47 | -------------------------------- 48 | 49 | Allows overriding values for nested fields if they are represented as a *pydantic* model. 50 | 51 | Here is an example: 52 | 53 | .. code-block:: python 54 | 55 | from pydantic import BaseModel 56 | from pydantic_settings import BaseSettingsModel, load_settings 57 | 58 | 59 | class ComponentOptions(BaseModel): 60 | val: str 61 | 62 | 63 | class AppSettings(BaseSettingsModel): 64 | class Config: 65 | env_prefix = 'FOO' 66 | 67 | component: ComponentOptions 68 | 69 | 70 | assert ( 71 | load_settings( 72 | AppSettings, 73 | '{}', 74 | load_env=True, 75 | type_hint='json', 76 | environ={'FOO_COMPONENT_VAL': 'SOME VALUE'}, 77 | ).component.val 78 | == 'SOME VALUE' 79 | ) 80 | 81 | 82 | Point exact error location inside file 83 | -------------------------------------- 84 | 85 | ..
code-block:: python 86 | 87 | from pydantic import ValidationError, IntegerError 88 | from pydantic_settings import BaseSettingsModel, load_settings, TextLocation 89 | from pydantic_settings.errors import ExtendedErrorWrapper 90 | 91 | 92 | class Foo(BaseSettingsModel): 93 | val: int 94 | 95 | 96 | try: 97 | load_settings(Foo, '{"val": "NOT AN INT"}', type_hint='json') 98 | except ValidationError as e: 99 | err_wrapper, *_ = e.raw_errors 100 | assert isinstance(err_wrapper, ExtendedErrorWrapper) 101 | assert isinstance(err_wrapper.exc, IntegerError) 102 | assert err_wrapper.source_loc == TextLocation( 103 | line=1, col=9, end_line=1, end_col=21, pos=9, end_pos=20 104 | ) 105 | else: 106 | raise Exception('must rise error') 107 | 108 | 109 | Extracts fields documentation 110 | ----------------------------- 111 | 112 | Allows to extract *Sphinx* style attributes documentation by processing AST tree of class definition 113 | 114 | .. code-block:: python 115 | 116 | from pydantic import BaseModel 117 | from pydantic_settings import with_attrs_docs 118 | 119 | 120 | @with_attrs_docs 121 | class Foo(BaseModel): 122 | bar: str 123 | """here is docs""" 124 | 125 | #: docs for baz 126 | baz: int 127 | 128 | #: yes 129 | #: of course 130 | is_there_multiline: bool = True 131 | 132 | 133 | assert Foo.__fields__['bar'].field_info.description == 'here is docs' 134 | assert Foo.__fields__['baz'].field_info.description == 'docs for baz' 135 | assert Foo.__fields__['is_there_multiline'].field_info.description == ( 136 | 'yes\nof course' 137 | ) 138 | 139 | 140 | Online docs 141 | ----------- 142 | 143 | Read more detailed documentation on the project 144 | `Read The Docs `_ page. 145 | 146 | 147 | Development setup 148 | ================= 149 | 150 | Project requires `poetry `_ for development setup. 151 | 152 | * If you aren’t have it already 153 | 154 | .. code-block:: sh 155 | 156 | pip install poetry 157 | 158 | * Install project dependencies 159 | 160 | .. 
code-block:: sh 161 | 162 | poetry install 163 | 164 | * Run tests 165 | 166 | .. code-block:: sh 167 | 168 | poetry run pytest . 169 | 170 | * Great, all works! Except one optional step: 171 | 172 | * Install `pre-commit `_ for pre-commit hooks 173 | 174 | .. code-block:: sh 175 | 176 | pip install pre-commit 177 | pre-commit install 178 | 179 | That will install pre-commit hooks, which will check code with *flake8* and *black*. 180 | 181 | .. 182 | 183 | *NOTE* project uses **black** as code formatter, but I personally really dislike their 184 | *“double quoted strings everywhere”* style, that’s why ``black -S`` should be used 185 | (anyway it’s configured in *pyproject.toml* file) 186 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | 2 | [//]: # (Not actually, this is original doc, but in result file you will not see this line) 3 | 4 | 5 | # Pydantic settings 6 | 7 | > Hipster-orgazmic tool to manage application settings 8 | 9 | 10 | [![Build Status](https://travis-ci.com/danields761/pydantic-settings.svg?branch=master)](https://travis-ci.com/danields761/pydantic-settings) 11 | 12 | [![PyPI version](https://badge.fury.io/py/pydantic-settings.svg)](https://badge.fury.io/py/pydantic-settings) 13 | 14 | [![Documentation Status](https://readthedocs.org/projects/pydantic-settings/badge/?version=latest)](https://pydantic-settings.readthedocs.io/en/latest/?badge=latest) 15 | 16 | 17 | Library which extends [pydantic](https://github.com/samuelcolvin/pydantic) functionality in scope of application settings. *Pydantic* already has settings 18 | implementation, e.g. `pydantic.BaseSettings`, but from my point of view it's missing some useful features: 19 | 20 | 1. Overriding settings values by environment variables even for nested fields 21 | 2. 
Providing detailed information about value location inside a loaded file or environment variable, which helps to point out the user's mistake 22 | 3. Documenting model fields doesn't feel comfortable, but it's really essential to write comprehensive documentation for application settings 23 | 24 | > __NOTE:__ Beta quality 25 | 26 | ## Installation 27 | 28 | Using pip: 29 | 30 | ```sh 31 | pip install pydantic-settings 32 | ``` 33 | 34 | ## Usage example 35 | 36 | ### Override values by env variables 37 | 38 | Allows overriding values for nested fields if they are represented as a *pydantic* model. 39 | 40 | Here is an example: 41 | 42 | .. literalinclude:: examples/override_by_env.py 43 | :language: python 44 | 45 | 46 | ### Point exact error location inside file 47 | 48 | .. literalinclude:: examples/loc_inside_text_content.py 49 | :language: python 50 | 51 | 52 | ### Extracts fields documentation 53 | 54 | Allows extracting *Sphinx* style attributes documentation by processing AST tree of class definition 55 | 56 | .. literalinclude:: examples/attr_docs_example.py 57 | :language: python 58 | 59 | 60 | ### Online docs 61 | 62 | Read more detailed documentation on the project 63 | [Read The Docs](https://pydantic-settings.readthedocs.io/en/latest/) page. 64 | 65 | ## Development setup 66 | 67 | Project requires [poetry](https://github.com/sdispater/poetry) for development setup. 68 | 69 | * If you don't have it already 70 | 71 | ```sh 72 | pip install poetry 73 | ``` 74 | 75 | * Install project dependencies 76 | 77 | ```sh 78 | poetry install 79 | ``` 80 | 81 | * Run tests 82 | 83 | ```sh 84 | poetry run pytest . 85 | ``` 86 | 87 | * Great, all works! Except one optional step: 88 | 89 | * Install [pre-commit](https://github.com/pre-commit/pre-commit) for pre-commit hooks 90 | 91 | ```sh 92 | pip install pre-commit 93 | pre-commit install 94 | ``` 95 | 96 | That will install pre-commit hooks, which will check code with *flake8* and *black*. 
97 | 98 | > *NOTE* project uses __black__ as code formatter, but i'am personally really dislike their 99 | > *"double quoted strings everywhere"* style, that's why `black -S` should be used 100 | > (anyway it's configured in *pyproject.toml* file) -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | import sys 8 | from pathlib import Path 9 | from datetime import datetime 10 | from packaging.version import Version 11 | import toml 12 | 13 | 14 | # -- Project information ----------------------------------------------------- 15 | proj_meta = toml.load(Path(__file__).parents[1] / 'pyproject.toml')['tool'][ 16 | 'poetry' 17 | ] 18 | 19 | author = ', '.join(proj_meta['authors']) 20 | project = 'Pydantic Settings' 21 | copyright = f'{datetime.now().year}, {author}' 22 | release = proj_meta['version'] 23 | version = Version(release).base_version if release else '' 24 | 25 | 26 | # -- General configuration --------------------------------------------------- 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = ['autoapi.extension', 'm2r', 'sphinx_rst_builder'] 32 | source_suffix = ['.rst', '.md'] 33 | 34 | 35 | # Add any paths that contain templates here, relative to this directory. 36 | templates_path = ['_templates'] 37 | 38 | # List of patterns, relative to source directory, that match files and 39 | # directories to ignore when looking for source files. 40 | # This pattern also affects html_static_path and html_extra_path. 
41 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 42 | 43 | # Configure AutoAPI 44 | autoapi_dirs = ['../pydantic_settings'] 45 | autoapi_add_toctree_entry = False 46 | autoapi_options = ['members', 'undoc-members'] 47 | 48 | # -- Options for HTML output ------------------------------------------------- 49 | 50 | # The theme to use for HTML and HTML Help pages. See the documentation for 51 | # a list of builtin themes. 52 | # 53 | html_theme = 'sphinx_rtd_theme' 54 | 55 | html_theme_options = {'display_version': True, 'collapse_navigation': True} 56 | 57 | # Add any paths that contain custom static files (such as style sheets) here, 58 | # relative to this directory. They are copied after the builtin static files, 59 | # so a file named "default.css" will overwrite the builtin "default.css". 60 | html_static_path = ['_static'] 61 | 62 | 63 | def setup(app): 64 | sys.path.append(str(Path(__file__).parents[1])) 65 | 66 | from docs.rst_builder_ext import CustomBuilder 67 | 68 | app.registry.builders['rst'] = CustomBuilder 69 | -------------------------------------------------------------------------------- /docs/examples/attr_docs_example.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | from pydantic_settings import with_attrs_docs 3 | 4 | 5 | @with_attrs_docs 6 | class Foo(BaseModel): 7 | bar: str 8 | """here is docs""" 9 | 10 | #: docs for baz 11 | baz: int 12 | 13 | #: yes 14 | #: of course 15 | is_there_multiline: bool = True 16 | 17 | 18 | assert Foo.__fields__['bar'].field_info.description == 'here is docs' 19 | assert Foo.__fields__['baz'].field_info.description == 'docs for baz' 20 | assert Foo.__fields__['is_there_multiline'].field_info.description == ( 21 | 'yes\nof course' 22 | ) 23 | -------------------------------------------------------------------------------- /docs/examples/env_var_exact_loc.py: 
-------------------------------------------------------------------------------- 1 | from pydantic import ValidationError, IntegerError 2 | from pydantic_settings import BaseSettingsModel, load_settings 3 | from pydantic_settings.errors import ExtendedErrorWrapper 4 | 5 | 6 | class Foo(BaseSettingsModel): 7 | val: int 8 | 9 | 10 | try: 11 | load_settings(Foo, load_env=True, environ={'APP_val': 'NOT AN INT'}) 12 | except ValidationError as e: 13 | err_wrapper, *_ = e.raw_errors 14 | assert isinstance(err_wrapper, ExtendedErrorWrapper) 15 | assert isinstance(err_wrapper.exc, IntegerError) 16 | assert err_wrapper.source_loc == ('APP_val', None) 17 | else: 18 | raise Exception('must rise error') 19 | -------------------------------------------------------------------------------- /docs/examples/load_env_prefix.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | from pydantic_settings import load_settings 3 | 4 | 5 | class Foo(BaseModel): 6 | val: int 7 | 8 | 9 | assert ( 10 | load_settings( 11 | Foo, load_env=True, env_prefix='EX', environ={'EX_VAL': '10'} 12 | ).val 13 | == 10 14 | ) 15 | -------------------------------------------------------------------------------- /docs/examples/loc_inside_text_content.py: -------------------------------------------------------------------------------- 1 | from pydantic import ValidationError, IntegerError 2 | from pydantic_settings import BaseSettingsModel, load_settings, TextLocation 3 | from pydantic_settings.errors import ExtendedErrorWrapper 4 | 5 | 6 | class Foo(BaseSettingsModel): 7 | val: int 8 | 9 | 10 | try: 11 | load_settings(Foo, '{"val": "NOT AN INT"}', type_hint='json') 12 | except ValidationError as e: 13 | err_wrapper, *_ = e.raw_errors 14 | assert isinstance(err_wrapper, ExtendedErrorWrapper) 15 | assert isinstance(err_wrapper.exc, IntegerError) 16 | assert err_wrapper.source_loc == TextLocation( 17 | line=1, col=9, end_line=1, end_col=21, 
pos=9, end_pos=20 18 | ) 19 | else: 20 | raise Exception('must rise error') 21 | -------------------------------------------------------------------------------- /docs/examples/override_by_env.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | from pydantic_settings import BaseSettingsModel, load_settings 3 | 4 | 5 | class ComponentOptions(BaseModel): 6 | val: str 7 | 8 | 9 | class AppSettings(BaseSettingsModel): 10 | class Config: 11 | env_prefix = 'FOO' 12 | 13 | component: ComponentOptions 14 | 15 | 16 | assert ( 17 | load_settings( 18 | AppSettings, 19 | '{}', 20 | load_env=True, 21 | type_hint='json', 22 | environ={'FOO_COMPONENT_VAL': 'SOME VALUE'}, 23 | ).component.val 24 | == 'SOME VALUE' 25 | ) 26 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Pydantic Settings documentation (|version|) 2 | =========================================== 3 | 4 | A set of tools helping to manage and work with application settings 5 | 6 | Getting Started 7 | +++++++++++++++ 8 | 9 | *Pydantic Settings* package available on `PyPI `_ 10 | 11 | .. code-block:: shell 12 | 13 | pip install pydantic-settings 14 | 15 | 16 | Manual by examples 17 | ++++++++++++++++++ 18 | 19 | Environment variables 20 | --------------------- 21 | 22 | Override settings values by env variables even for nested fields 23 | 24 | .. literalinclude:: examples/override_by_env.py 25 | :language: python 26 | 27 | It's not necessary to override :py:class:`.BaseSettingsModel` in 28 | order to use :py:func:`.load_settings` functionality. It also works with plain 29 | :py:class:`pydantic.BaseModels` subclasses. Specify :py:obj:`~.load_settings.env_prefix` 30 | in order to override default :code:`"APP"` prefix. 31 | 32 | .. 
literalinclude:: examples/load_env_prefix.py 33 | :language: python 34 | 35 | 36 | Rich location specifiers 37 | ------------------------ 38 | 39 | Also :py:func:`.load_settings` provides rich information about location 40 | of a wrong value inside the source. 41 | 42 | Location inside text content 43 | ............................ 44 | 45 | .. literalinclude:: examples/loc_inside_text_content.py 46 | :language: python 47 | 48 | 49 | Location among environment variables 50 | .................................... 51 | 52 | Also saves exact env variable name 53 | 54 | .. literalinclude:: examples/env_var_exact_loc.py 55 | :language: python 56 | 57 | 58 | Extract attributes docstrings 59 | ----------------------------- 60 | 61 | By default, *pydantic* offers very verbose way of documenting fields, e.g. 62 | 63 | .. code-block:: python 64 | 65 | class Foo(BaseModel): 66 | val: int = Schema(0, description='some valuable field description') 67 | 68 | That verbosity may be avoided by extracting documentation from so called *attribute 69 | docstring*, which is, for reference, also supported by *sphinx-autodoc* 70 | (also there is very old rejected :pep:`224`, which proposes it), example: 71 | 72 | .. literalinclude:: examples/attr_docs_example.py 73 | :language: python 74 | 75 | :py:class:`.BaseSettingsModel` does it automatically. 76 | 77 | .. note:: 78 | Documented multiple-definitions inside class isn’t supported because it is 79 | really unclear to which of definitions the docstring should belongs 80 | 81 | 82 | API Reference 83 | ------------- 84 | 85 | Also there is :doc:`API Reference `. 86 | It's a bit dirty, but still may provide helpful info. 
87 | 88 | Indices and tables 89 | ================== 90 | 91 | * :ref:`genindex` 92 | * :ref:`modindex` 93 | * :ref:`search` 94 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx==2.2.0 2 | toml 3 | sphinx-autoapi 4 | sphinx_rtd_theme 5 | m2r 6 | sphinx-rst-builder 7 | -------------------------------------------------------------------------------- /docs/rst_builder_ext.py: -------------------------------------------------------------------------------- 1 | from docutils.nodes import SkipChildren 2 | from sphinx_rst_builder import RstBuilder, RstWriter 3 | from sphinx_rst_builder._writer import RstTranslator 4 | 5 | 6 | class CustomBuilder(RstBuilder): 7 | def prepare_writing(self, docnames): 8 | self.writer = _Writer(self) 9 | 10 | 11 | class _Writer(RstWriter): 12 | def translate(self): 13 | visitor = _Translator(self.document, self.builder) 14 | self.document.walkabout(visitor) 15 | self.output = visitor.body 16 | self.output = f'..\n AUTOGENERATED DO NOT MODIFY\n{self.output}' 17 | 18 | 19 | class _Translator(RstTranslator): 20 | def visit_reference(self, node): 21 | if len(node.children) == 1 and node.children[0].tagname == 'image': 22 | img_node = node.children[0] 23 | self.add_text(f'.. 
image:: {img_node["uri"]}\n') 24 | self.add_text(f' :target: {node["refuri"]}\n') 25 | raise SkipChildren 26 | else: 27 | super().visit_reference(node) 28 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "alabaster" 3 | version = "0.7.12" 4 | description = "A configurable sidebar-enabled Sphinx theme" 5 | category = "dev" 6 | optional = false 7 | python-versions = "*" 8 | 9 | [[package]] 10 | name = "appdirs" 11 | version = "1.4.4" 12 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 13 | category = "dev" 14 | optional = false 15 | python-versions = "*" 16 | 17 | [[package]] 18 | name = "astroid" 19 | version = "2.4.1" 20 | description = "An abstract syntax tree for Python with inference support." 21 | category = "dev" 22 | optional = false 23 | python-versions = ">=3.5" 24 | 25 | [package.dependencies] 26 | lazy-object-proxy = ">=1.4.0,<1.5.0" 27 | six = ">=1.12,<2.0" 28 | typed-ast = {version = ">=1.4.0,<1.5", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} 29 | wrapt = ">=1.11,<2.0" 30 | 31 | [[package]] 32 | name = "atomicwrites" 33 | version = "1.4.0" 34 | description = "Atomic file writes." 
35 | category = "dev" 36 | optional = false 37 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 38 | 39 | [[package]] 40 | name = "attrs" 41 | version = "19.3.0" 42 | description = "Classes Without Boilerplate" 43 | category = "main" 44 | optional = false 45 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 46 | 47 | [package.extras] 48 | azure-pipelines = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "pytest-azurepipelines"] 49 | dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "pre-commit"] 50 | docs = ["sphinx", "zope.interface"] 51 | tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] 52 | 53 | [[package]] 54 | name = "babel" 55 | version = "2.8.0" 56 | description = "Internationalization utilities" 57 | category = "dev" 58 | optional = false 59 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 60 | 61 | [package.dependencies] 62 | pytz = ">=2015.7" 63 | 64 | [[package]] 65 | name = "black" 66 | version = "19.10b0" 67 | description = "The uncompromising code formatter." 68 | category = "dev" 69 | optional = false 70 | python-versions = ">=3.6" 71 | 72 | [package.dependencies] 73 | appdirs = "*" 74 | attrs = ">=18.1.0" 75 | click = ">=6.5" 76 | pathspec = ">=0.6,<1" 77 | regex = "*" 78 | toml = ">=0.9.4" 79 | typed-ast = ">=1.4.0" 80 | 81 | [package.extras] 82 | d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] 83 | 84 | [[package]] 85 | name = "certifi" 86 | version = "2020.4.5.1" 87 | description = "Python package for providing Mozilla's CA Bundle." 
88 | category = "dev" 89 | optional = false 90 | python-versions = "*" 91 | 92 | [[package]] 93 | name = "chardet" 94 | version = "3.0.4" 95 | description = "Universal encoding detector for Python 2 and 3" 96 | category = "dev" 97 | optional = false 98 | python-versions = "*" 99 | 100 | [[package]] 101 | name = "class-doc" 102 | version = "0.2.0b0" 103 | description = "Extract attributes docstrings defined in various ways" 104 | category = "main" 105 | optional = false 106 | python-versions = ">=3.6,<4.0" 107 | 108 | [package.dependencies] 109 | more-itertools = ">=7.2,<8.0" 110 | 111 | [[package]] 112 | name = "click" 113 | version = "7.1.2" 114 | description = "Composable command line interface toolkit" 115 | category = "dev" 116 | optional = false 117 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 118 | 119 | [[package]] 120 | name = "colorama" 121 | version = "0.4.3" 122 | description = "Cross-platform colored terminal text." 123 | category = "dev" 124 | optional = false 125 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 126 | 127 | [[package]] 128 | name = "dataclasses" 129 | version = "0.6" 130 | description = "A backport of the dataclasses module for Python 3.6" 131 | category = "main" 132 | optional = false 133 | python-versions = "*" 134 | 135 | [[package]] 136 | name = "docutils" 137 | version = "0.16" 138 | description = "Docutils -- Python Documentation Utilities" 139 | category = "dev" 140 | optional = false 141 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 142 | 143 | [[package]] 144 | name = "flake8" 145 | version = "3.8.4" 146 | description = "the modular source code checker: pep8 pyflakes and co" 147 | category = "dev" 148 | optional = false 149 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" 150 | 151 | [package.dependencies] 152 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 153 | mccabe = ">=0.6.0,<0.7.0" 154 | pycodestyle = 
">=2.6.0a1,<2.7.0" 155 | pyflakes = ">=2.2.0,<2.3.0" 156 | 157 | [[package]] 158 | name = "idna" 159 | version = "2.9" 160 | description = "Internationalized Domain Names in Applications (IDNA)" 161 | category = "dev" 162 | optional = false 163 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 164 | 165 | [[package]] 166 | name = "imagesize" 167 | version = "1.2.0" 168 | description = "Getting image size from png/jpeg/jpeg2000/gif file" 169 | category = "dev" 170 | optional = false 171 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 172 | 173 | [[package]] 174 | name = "importlib-metadata" 175 | version = "1.6.0" 176 | description = "Read metadata from Python packages" 177 | category = "dev" 178 | optional = false 179 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 180 | 181 | [package.dependencies] 182 | zipp = ">=0.5" 183 | 184 | [package.extras] 185 | docs = ["sphinx", "rst.linker"] 186 | testing = ["packaging", "importlib-resources"] 187 | 188 | [[package]] 189 | name = "isort" 190 | version = "5.6.4" 191 | description = "A Python utility / library to sort Python imports." 192 | category = "dev" 193 | optional = false 194 | python-versions = ">=3.6,<4.0" 195 | 196 | [package.extras] 197 | pipfile_deprecated_finder = ["pipreqs", "requirementslib"] 198 | requirements_deprecated_finder = ["pipreqs", "pip-api"] 199 | colors = ["colorama (>=0.4.3,<0.5.0)"] 200 | 201 | [[package]] 202 | name = "jinja2" 203 | version = "2.11.2" 204 | description = "A very fast and expressive template engine." 205 | category = "dev" 206 | optional = false 207 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 208 | 209 | [package.dependencies] 210 | MarkupSafe = ">=0.23" 211 | 212 | [package.extras] 213 | i18n = ["Babel (>=0.8)"] 214 | 215 | [[package]] 216 | name = "lazy-object-proxy" 217 | version = "1.4.3" 218 | description = "A fast and thorough lazy object proxy." 
219 | category = "dev" 220 | optional = false 221 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 222 | 223 | [[package]] 224 | name = "m2r" 225 | version = "0.2.1" 226 | description = "Markdown and reStructuredText in a single file." 227 | category = "dev" 228 | optional = false 229 | python-versions = "*" 230 | 231 | [package.dependencies] 232 | docutils = "*" 233 | mistune = "*" 234 | 235 | [[package]] 236 | name = "markupsafe" 237 | version = "1.1.1" 238 | description = "Safely add untrusted strings to HTML/XML markup." 239 | category = "dev" 240 | optional = false 241 | python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" 242 | 243 | [[package]] 244 | name = "mccabe" 245 | version = "0.6.1" 246 | description = "McCabe checker, plugin for flake8" 247 | category = "dev" 248 | optional = false 249 | python-versions = "*" 250 | 251 | [[package]] 252 | name = "mistune" 253 | version = "0.8.4" 254 | description = "The fastest markdown parser in pure Python" 255 | category = "dev" 256 | optional = false 257 | python-versions = "*" 258 | 259 | [[package]] 260 | name = "more-itertools" 261 | version = "7.2.0" 262 | description = "More routines for operating on iterables, beyond itertools" 263 | category = "main" 264 | optional = false 265 | python-versions = ">=3.4" 266 | 267 | [[package]] 268 | name = "packaging" 269 | version = "20.4" 270 | description = "Core utilities for Python packages" 271 | category = "dev" 272 | optional = false 273 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 274 | 275 | [package.dependencies] 276 | pyparsing = ">=2.0.2" 277 | six = "*" 278 | 279 | [[package]] 280 | name = "pathspec" 281 | version = "0.8.0" 282 | description = "Utility library for gitignore style pattern matching of file paths." 
283 | category = "dev" 284 | optional = false 285 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 286 | 287 | [[package]] 288 | name = "pluggy" 289 | version = "0.13.1" 290 | description = "plugin and hook calling mechanisms for python" 291 | category = "dev" 292 | optional = false 293 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 294 | 295 | [package.dependencies] 296 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 297 | 298 | [package.extras] 299 | dev = ["pre-commit", "tox"] 300 | 301 | [[package]] 302 | name = "py" 303 | version = "1.8.1" 304 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 305 | category = "dev" 306 | optional = false 307 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 308 | 309 | [[package]] 310 | name = "pycodestyle" 311 | version = "2.6.0" 312 | description = "Python style guide checker" 313 | category = "dev" 314 | optional = false 315 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 316 | 317 | [[package]] 318 | name = "pydantic" 319 | version = "1.5.1" 320 | description = "Data validation and settings management using python 3.6 type hinting" 321 | category = "main" 322 | optional = false 323 | python-versions = ">=3.6" 324 | 325 | [package.dependencies] 326 | dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} 327 | 328 | [package.extras] 329 | dotenv = ["python-dotenv (>=0.10.4)"] 330 | email = ["email-validator (>=1.0.3)"] 331 | typing_extensions = ["typing-extensions (>=3.7.2)"] 332 | 333 | [[package]] 334 | name = "pyflakes" 335 | version = "2.2.0" 336 | description = "passive checker of Python programs" 337 | category = "dev" 338 | optional = false 339 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 340 | 341 | [[package]] 342 | name = "pygments" 343 | version = "2.6.1" 344 | description = "Pygments is a syntax highlighting package written in Python." 
345 | category = "dev" 346 | optional = false 347 | python-versions = ">=3.5" 348 | 349 | [[package]] 350 | name = "pyparsing" 351 | version = "2.4.7" 352 | description = "Python parsing module" 353 | category = "dev" 354 | optional = false 355 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 356 | 357 | [[package]] 358 | name = "pytest" 359 | version = "5.4.2" 360 | description = "pytest: simple powerful testing with Python" 361 | category = "dev" 362 | optional = false 363 | python-versions = ">=3.5" 364 | 365 | [package.dependencies] 366 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} 367 | attrs = ">=17.4.0" 368 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 369 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 370 | more-itertools = ">=4.0.0" 371 | packaging = "*" 372 | pluggy = ">=0.12,<1.0" 373 | py = ">=1.5.0" 374 | wcwidth = "*" 375 | 376 | [package.extras] 377 | checkqa-mypy = ["mypy (==v0.761)"] 378 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] 379 | 380 | [[package]] 381 | name = "pytest-clarity" 382 | version = "0.3.0a0" 383 | description = "A plugin providing an alternative, colourful diff output for failing assertions." 
384 | category = "dev" 385 | optional = false 386 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 387 | 388 | [package.dependencies] 389 | pytest = ">=3.5.0" 390 | termcolor = "1.1.0" 391 | 392 | [[package]] 393 | name = "pytz" 394 | version = "2020.1" 395 | description = "World timezone definitions, modern and historical" 396 | category = "dev" 397 | optional = false 398 | python-versions = "*" 399 | 400 | [[package]] 401 | name = "pyyaml" 402 | version = "5.3.1" 403 | description = "YAML parser and emitter for Python" 404 | category = "main" 405 | optional = false 406 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 407 | 408 | [[package]] 409 | name = "regex" 410 | version = "2020.5.14" 411 | description = "Alternative regular expression module, to replace re." 412 | category = "dev" 413 | optional = false 414 | python-versions = "*" 415 | 416 | [[package]] 417 | name = "requests" 418 | version = "2.23.0" 419 | description = "Python HTTP for Humans." 420 | category = "dev" 421 | optional = false 422 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 423 | 424 | [package.dependencies] 425 | certifi = ">=2017.4.17" 426 | chardet = ">=3.0.2,<4" 427 | idna = ">=2.5,<3" 428 | urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26" 429 | 430 | [package.extras] 431 | security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] 432 | socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] 433 | 434 | [[package]] 435 | name = "six" 436 | version = "1.15.0" 437 | description = "Python 2 and 3 compatibility utilities" 438 | category = "dev" 439 | optional = false 440 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 441 | 442 | [[package]] 443 | name = "snowballstemmer" 444 | version = "2.0.0" 445 | description = "This package provides 26 stemmers for 25 languages generated from Snowball algorithms." 
446 | category = "dev" 447 | optional = false 448 | python-versions = "*" 449 | 450 | [[package]] 451 | name = "sphinx" 452 | version = "2.4.4" 453 | description = "Python documentation generator" 454 | category = "dev" 455 | optional = false 456 | python-versions = ">=3.5" 457 | 458 | [package.dependencies] 459 | alabaster = ">=0.7,<0.8" 460 | babel = ">=1.3,<2.0 || >2.0" 461 | colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} 462 | docutils = ">=0.12" 463 | imagesize = "*" 464 | Jinja2 = ">=2.3" 465 | packaging = "*" 466 | Pygments = ">=2.0" 467 | requests = ">=2.5.0" 468 | snowballstemmer = ">=1.1" 469 | sphinxcontrib-applehelp = "*" 470 | sphinxcontrib-devhelp = "*" 471 | sphinxcontrib-htmlhelp = "*" 472 | sphinxcontrib-jsmath = "*" 473 | sphinxcontrib-qthelp = "*" 474 | sphinxcontrib-serializinghtml = "*" 475 | 476 | [package.extras] 477 | docs = ["sphinxcontrib-websupport"] 478 | test = ["pytest (<5.3.3)", "pytest-cov", "html5lib", "flake8 (>=3.5.0)", "flake8-import-order", "mypy (>=0.761)", "docutils-stubs"] 479 | 480 | [[package]] 481 | name = "sphinx-autoapi" 482 | version = "1.3.0" 483 | description = "Sphinx API documentation generator" 484 | category = "dev" 485 | optional = false 486 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" 487 | 488 | [package.dependencies] 489 | astroid = {version = "*", markers = "python_version >= \"3\""} 490 | Jinja2 = "*" 491 | PyYAML = "*" 492 | sphinx = ">=1.6" 493 | unidecode = "*" 494 | 495 | [package.extras] 496 | dotnet = ["sphinxcontrib-dotnetdomain"] 497 | go = ["sphinxcontrib-golangdomain"] 498 | 499 | [[package]] 500 | name = "sphinx-rst-builder" 501 | version = "0.0.3" 502 | description = "Sphinx reStructuredText builder" 503 | category = "dev" 504 | optional = false 505 | python-versions = ">=3.6" 506 | 507 | [package.dependencies] 508 | sphinx = ">=2" 509 | 510 | [[package]] 511 | name = "sphinx-rtd-theme" 512 | version = "0.4.3" 513 | description = "Read 
the Docs theme for Sphinx" 514 | category = "dev" 515 | optional = false 516 | python-versions = "*" 517 | 518 | [package.dependencies] 519 | sphinx = "*" 520 | 521 | [[package]] 522 | name = "sphinxcontrib-applehelp" 523 | version = "1.0.2" 524 | description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" 525 | category = "dev" 526 | optional = false 527 | python-versions = ">=3.5" 528 | 529 | [package.extras] 530 | lint = ["flake8", "mypy", "docutils-stubs"] 531 | test = ["pytest"] 532 | 533 | [[package]] 534 | name = "sphinxcontrib-devhelp" 535 | version = "1.0.2" 536 | description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 537 | category = "dev" 538 | optional = false 539 | python-versions = ">=3.5" 540 | 541 | [package.extras] 542 | lint = ["flake8", "mypy", "docutils-stubs"] 543 | test = ["pytest"] 544 | 545 | [[package]] 546 | name = "sphinxcontrib-htmlhelp" 547 | version = "1.0.3" 548 | description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" 549 | category = "dev" 550 | optional = false 551 | python-versions = ">=3.5" 552 | 553 | [package.extras] 554 | lint = ["flake8", "mypy", "docutils-stubs"] 555 | test = ["pytest", "html5lib"] 556 | 557 | [[package]] 558 | name = "sphinxcontrib-jsmath" 559 | version = "1.0.1" 560 | description = "A sphinx extension which renders display math in HTML via JavaScript" 561 | category = "dev" 562 | optional = false 563 | python-versions = ">=3.5" 564 | 565 | [package.extras] 566 | test = ["pytest", "flake8", "mypy"] 567 | 568 | [[package]] 569 | name = "sphinxcontrib-qthelp" 570 | version = "1.0.3" 571 | description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
572 | category = "dev" 573 | optional = false 574 | python-versions = ">=3.5" 575 | 576 | [package.extras] 577 | lint = ["flake8", "mypy", "docutils-stubs"] 578 | test = ["pytest"] 579 | 580 | [[package]] 581 | name = "sphinxcontrib-serializinghtml" 582 | version = "1.1.4" 583 | description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 584 | category = "dev" 585 | optional = false 586 | python-versions = ">=3.5" 587 | 588 | [package.extras] 589 | lint = ["flake8", "mypy", "docutils-stubs"] 590 | test = ["pytest"] 591 | 592 | [[package]] 593 | name = "termcolor" 594 | version = "1.1.0" 595 | description = "ANSII Color formatting for output in terminal." 596 | category = "dev" 597 | optional = false 598 | python-versions = "*" 599 | 600 | [[package]] 601 | name = "toml" 602 | version = "0.10.1" 603 | description = "Python Library for Tom's Obvious, Minimal Language" 604 | category = "dev" 605 | optional = false 606 | python-versions = "*" 607 | 608 | [[package]] 609 | name = "tomlkit" 610 | version = "0.5.11" 611 | description = "Style preserving TOML library" 612 | category = "main" 613 | optional = false 614 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 615 | 616 | [[package]] 617 | name = "typed-ast" 618 | version = "1.4.1" 619 | description = "a fork of Python 2 and 3 ast modules with type comment support" 620 | category = "dev" 621 | optional = false 622 | python-versions = "*" 623 | 624 | [[package]] 625 | name = "typing-extensions" 626 | version = "3.7.4.2" 627 | description = "Backported and Experimental Type Hints for Python 3.5+" 628 | category = "main" 629 | optional = false 630 | python-versions = "*" 631 | 632 | [[package]] 633 | name = "unidecode" 634 | version = "1.1.1" 635 | description = "ASCII transliterations of Unicode text" 636 | category = "dev" 637 | optional = false 638 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 639 | 640 | [[package]] 
641 | name = "urllib3" 642 | version = "1.25.9" 643 | description = "HTTP library with thread-safe connection pooling, file post, and more." 644 | category = "dev" 645 | optional = false 646 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 647 | 648 | [package.extras] 649 | brotli = ["brotlipy (>=0.6.0)"] 650 | secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)", "ipaddress"] 651 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 652 | 653 | [[package]] 654 | name = "wcwidth" 655 | version = "0.1.9" 656 | description = "Measures number of Terminal column cells of wide-character codes" 657 | category = "dev" 658 | optional = false 659 | python-versions = "*" 660 | 661 | [[package]] 662 | name = "wrapt" 663 | version = "1.12.1" 664 | description = "Module for decorators, wrappers and monkey patching." 665 | category = "dev" 666 | optional = false 667 | python-versions = "*" 668 | 669 | [[package]] 670 | name = "zipp" 671 | version = "3.1.0" 672 | description = "Backport of pathlib-compatible object wrapper for zip files" 673 | category = "dev" 674 | optional = false 675 | python-versions = ">=3.6" 676 | 677 | [package.extras] 678 | docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] 679 | testing = ["jaraco.itertools", "func-timeout"] 680 | 681 | [metadata] 682 | lock-version = "1.1" 683 | python-versions = "^3.6" 684 | content-hash = "9a942ec46bfc5538b94a1a93b5c30ba1ac984ae57dc0e25952de2e9b4bce1664" 685 | 686 | [metadata.files] 687 | alabaster = [ 688 | {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, 689 | {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, 690 | ] 691 | appdirs = [ 692 | {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, 693 | {file = 
"appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, 694 | ] 695 | astroid = [ 696 | {file = "astroid-2.4.1-py3-none-any.whl", hash = "sha256:d8506842a3faf734b81599c8b98dcc423de863adcc1999248480b18bd31a0f38"}, 697 | {file = "astroid-2.4.1.tar.gz", hash = "sha256:4c17cea3e592c21b6e222f673868961bad77e1f985cb1694ed077475a89229c1"}, 698 | ] 699 | atomicwrites = [ 700 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, 701 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, 702 | ] 703 | attrs = [ 704 | {file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"}, 705 | {file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"}, 706 | ] 707 | babel = [ 708 | {file = "Babel-2.8.0-py2.py3-none-any.whl", hash = "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4"}, 709 | {file = "Babel-2.8.0.tar.gz", hash = "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38"}, 710 | ] 711 | black = [ 712 | {file = "black-19.10b0-py36-none-any.whl", hash = "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b"}, 713 | {file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"}, 714 | ] 715 | certifi = [ 716 | {file = "certifi-2020.4.5.1-py2.py3-none-any.whl", hash = "sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304"}, 717 | {file = "certifi-2020.4.5.1.tar.gz", hash = "sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519"}, 718 | ] 719 | chardet = [ 720 | {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, 721 | {file = 
"chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, 722 | ] 723 | class-doc = [ 724 | {file = "class-doc-0.2.0b0.tar.gz", hash = "sha256:ec3add94416f70f6a682440b6649f52132eb5015e847cbbc8f85df601aa75f84"}, 725 | {file = "class_doc-0.2.0b0-py3-none-any.whl", hash = "sha256:df9fcd0bba659e0fb491769bc4c9e2619ce571d392960c4def6f5472ba76cf64"}, 726 | ] 727 | click = [ 728 | {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, 729 | {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, 730 | ] 731 | colorama = [ 732 | {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"}, 733 | {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"}, 734 | ] 735 | dataclasses = [ 736 | {file = "dataclasses-0.6-py3-none-any.whl", hash = "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f"}, 737 | {file = "dataclasses-0.6.tar.gz", hash = "sha256:6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84"}, 738 | ] 739 | docutils = [ 740 | {file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"}, 741 | {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"}, 742 | ] 743 | flake8 = [ 744 | {file = "flake8-3.8.4-py2.py3-none-any.whl", hash = "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839"}, 745 | {file = "flake8-3.8.4.tar.gz", hash = "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b"}, 746 | ] 747 | idna = [ 748 | {file = "idna-2.9-py2.py3-none-any.whl", hash = "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"}, 749 | {file = "idna-2.9.tar.gz", hash = 
"sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb"}, 750 | ] 751 | imagesize = [ 752 | {file = "imagesize-1.2.0-py2.py3-none-any.whl", hash = "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1"}, 753 | {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, 754 | ] 755 | importlib-metadata = [ 756 | {file = "importlib_metadata-1.6.0-py2.py3-none-any.whl", hash = "sha256:2a688cbaa90e0cc587f1df48bdc97a6eadccdcd9c35fb3f976a09e3b5016d90f"}, 757 | {file = "importlib_metadata-1.6.0.tar.gz", hash = "sha256:34513a8a0c4962bc66d35b359558fd8a5e10cd472d37aec5f66858addef32c1e"}, 758 | ] 759 | isort = [ 760 | {file = "isort-5.6.4-py3-none-any.whl", hash = "sha256:dcab1d98b469a12a1a624ead220584391648790275560e1a43e54c5dceae65e7"}, 761 | {file = "isort-5.6.4.tar.gz", hash = "sha256:dcaeec1b5f0eca77faea2a35ab790b4f3680ff75590bfcb7145986905aab2f58"}, 762 | ] 763 | jinja2 = [ 764 | {file = "Jinja2-2.11.2-py2.py3-none-any.whl", hash = "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"}, 765 | {file = "Jinja2-2.11.2.tar.gz", hash = "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0"}, 766 | ] 767 | lazy-object-proxy = [ 768 | {file = "lazy-object-proxy-1.4.3.tar.gz", hash = "sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0"}, 769 | {file = "lazy_object_proxy-1.4.3-cp27-cp27m-macosx_10_13_x86_64.whl", hash = "sha256:a2238e9d1bb71a56cd710611a1614d1194dc10a175c1e08d75e1a7bcc250d442"}, 770 | {file = "lazy_object_proxy-1.4.3-cp27-cp27m-win32.whl", hash = "sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4"}, 771 | {file = "lazy_object_proxy-1.4.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4677f594e474c91da97f489fea5b7daa17b5517190899cf213697e48d3902f5a"}, 772 | {file = "lazy_object_proxy-1.4.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = 
"sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d"}, 773 | {file = "lazy_object_proxy-1.4.3-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:d945239a5639b3ff35b70a88c5f2f491913eb94871780ebfabb2568bd58afc5a"}, 774 | {file = "lazy_object_proxy-1.4.3-cp34-cp34m-win32.whl", hash = "sha256:9651375199045a358eb6741df3e02a651e0330be090b3bc79f6d0de31a80ec3e"}, 775 | {file = "lazy_object_proxy-1.4.3-cp34-cp34m-win_amd64.whl", hash = "sha256:eba7011090323c1dadf18b3b689845fd96a61ba0a1dfbd7f24b921398affc357"}, 776 | {file = "lazy_object_proxy-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:48dab84ebd4831077b150572aec802f303117c8cc5c871e182447281ebf3ac50"}, 777 | {file = "lazy_object_proxy-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:ca0a928a3ddbc5725be2dd1cf895ec0a254798915fb3a36af0964a0a4149e3db"}, 778 | {file = "lazy_object_proxy-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:194d092e6f246b906e8f70884e620e459fc54db3259e60cf69a4d66c3fda3449"}, 779 | {file = "lazy_object_proxy-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:97bb5884f6f1cdce0099f86b907aa41c970c3c672ac8b9c8352789e103cf3156"}, 780 | {file = "lazy_object_proxy-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:cb2c7c57005a6804ab66f106ceb8482da55f5314b7fcb06551db1edae4ad1531"}, 781 | {file = "lazy_object_proxy-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:8d859b89baf8ef7f8bc6b00aa20316483d67f0b1cbf422f5b4dc56701c8f2ffb"}, 782 | {file = "lazy_object_proxy-1.4.3-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:1be7e4c9f96948003609aa6c974ae59830a6baecc5376c25c92d7d697e684c08"}, 783 | {file = "lazy_object_proxy-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d74bb8693bf9cf75ac3b47a54d716bbb1a92648d5f781fc799347cfc95952383"}, 784 | {file = "lazy_object_proxy-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:9b15f3f4c0f35727d3a0fba4b770b3c4ebbb1fa907dbcc046a1d2799f3edd142"}, 785 | {file = "lazy_object_proxy-1.4.3-cp37-cp37m-win_amd64.whl", hash = 
"sha256:9254f4358b9b541e3441b007a0ea0764b9d056afdeafc1a5569eee1cc6c1b9ea"}, 786 | {file = "lazy_object_proxy-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:a6ae12d08c0bf9909ce12385803a543bfe99b95fe01e752536a60af2b7797c62"}, 787 | {file = "lazy_object_proxy-1.4.3-cp38-cp38-win32.whl", hash = "sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd"}, 788 | {file = "lazy_object_proxy-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239"}, 789 | ] 790 | m2r = [ 791 | {file = "m2r-0.2.1.tar.gz", hash = "sha256:bf90bad66cda1164b17e5ba4a037806d2443f2a4d5ddc9f6a5554a0322aaed99"}, 792 | ] 793 | markupsafe = [ 794 | {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, 795 | {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"}, 796 | {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183"}, 797 | {file = "MarkupSafe-1.1.1-cp27-cp27m-win32.whl", hash = "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b"}, 798 | {file = "MarkupSafe-1.1.1-cp27-cp27m-win_amd64.whl", hash = "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e"}, 799 | {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f"}, 800 | {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1"}, 801 | {file = "MarkupSafe-1.1.1-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5"}, 802 | {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_i686.whl", hash = 
"sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1"}, 803 | {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735"}, 804 | {file = "MarkupSafe-1.1.1-cp34-cp34m-win32.whl", hash = "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21"}, 805 | {file = "MarkupSafe-1.1.1-cp34-cp34m-win_amd64.whl", hash = "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235"}, 806 | {file = "MarkupSafe-1.1.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b"}, 807 | {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f"}, 808 | {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905"}, 809 | {file = "MarkupSafe-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1"}, 810 | {file = "MarkupSafe-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d"}, 811 | {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff"}, 812 | {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473"}, 813 | {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e"}, 814 | {file = "MarkupSafe-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66"}, 815 | {file = "MarkupSafe-1.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5"}, 816 | {file = 
"MarkupSafe-1.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d"}, 817 | {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e"}, 818 | {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"}, 819 | {file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"}, 820 | {file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"}, 821 | {file = "MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15"}, 822 | {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2"}, 823 | {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42"}, 824 | {file = "MarkupSafe-1.1.1-cp38-cp38-win32.whl", hash = "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b"}, 825 | {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"}, 826 | {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, 827 | ] 828 | mccabe = [ 829 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 830 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 831 | ] 832 | mistune = [ 833 | {file = "mistune-0.8.4-py2.py3-none-any.whl", hash = 
"sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4"}, 834 | {file = "mistune-0.8.4.tar.gz", hash = "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"}, 835 | ] 836 | more-itertools = [ 837 | {file = "more-itertools-7.2.0.tar.gz", hash = "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832"}, 838 | {file = "more_itertools-7.2.0-py3-none-any.whl", hash = "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"}, 839 | ] 840 | packaging = [ 841 | {file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"}, 842 | {file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"}, 843 | ] 844 | pathspec = [ 845 | {file = "pathspec-0.8.0-py2.py3-none-any.whl", hash = "sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0"}, 846 | {file = "pathspec-0.8.0.tar.gz", hash = "sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061"}, 847 | ] 848 | pluggy = [ 849 | {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, 850 | {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, 851 | ] 852 | py = [ 853 | {file = "py-1.8.1-py2.py3-none-any.whl", hash = "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0"}, 854 | {file = "py-1.8.1.tar.gz", hash = "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa"}, 855 | ] 856 | pycodestyle = [ 857 | {file = "pycodestyle-2.6.0-py2.py3-none-any.whl", hash = "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367"}, 858 | {file = "pycodestyle-2.6.0.tar.gz", hash = "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"}, 859 | ] 860 | pydantic = [ 861 | {file = 
"pydantic-1.5.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2a6904e9f18dea58f76f16b95cba6a2f20b72d787abd84ecd67ebc526e61dce6"}, 862 | {file = "pydantic-1.5.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:da8099fca5ee339d5572cfa8af12cf0856ae993406f0b1eb9bb38c8a660e7416"}, 863 | {file = "pydantic-1.5.1-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:68dece67bff2b3a5cc188258e46b49f676a722304f1c6148ae08e9291e284d98"}, 864 | {file = "pydantic-1.5.1-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ab863853cb502480b118187d670f753be65ec144e1654924bec33d63bc8b3ce2"}, 865 | {file = "pydantic-1.5.1-cp36-cp36m-win_amd64.whl", hash = "sha256:2007eb062ed0e57875ce8ead12760a6e44bf5836e6a1a7ea81d71eeecf3ede0f"}, 866 | {file = "pydantic-1.5.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:20a15a303ce1e4d831b4e79c17a4a29cb6740b12524f5bba3ea363bff65732bc"}, 867 | {file = "pydantic-1.5.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:473101121b1bd454c8effc9fe66d54812fdc128184d9015c5aaa0d4e58a6d338"}, 868 | {file = "pydantic-1.5.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:9be755919258d5d168aeffbe913ed6e8bd562e018df7724b68cabdee3371e331"}, 869 | {file = "pydantic-1.5.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:b96ce81c4b5ca62ab81181212edfd057beaa41411cd9700fbcb48a6ba6564b4e"}, 870 | {file = "pydantic-1.5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:93b9f265329d9827f39f0fca68f5d72cc8321881cdc519a1304fa73b9f8a75bd"}, 871 | {file = "pydantic-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e2c753d355126ddd1eefeb167fa61c7037ecd30b98e7ebecdc0d1da463b4ea09"}, 872 | {file = "pydantic-1.5.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:8433dbb87246c0f562af75d00fa80155b74e4f6924b0db6a2078a3cd2f11c6c4"}, 873 | {file = "pydantic-1.5.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:0a1cdf24e567d42dc762d3fed399bd211a13db2e8462af9dfa93b34c41648efb"}, 874 | {file = "pydantic-1.5.1-cp38-cp38-manylinux2014_x86_64.whl", hash = 
"sha256:8be325fc9da897029ee48d1b5e40df817d97fe969f3ac3fd2434ba7e198c55d5"}, 875 | {file = "pydantic-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:3714a4056f5bdbecf3a41e0706ec9b228c9513eee2ad884dc2c568c4dfa540e9"}, 876 | {file = "pydantic-1.5.1-py36.py37.py38-none-any.whl", hash = "sha256:70f27d2f0268f490fe3de0a9b6fca7b7492b8fd6623f9fecd25b221ebee385e3"}, 877 | {file = "pydantic-1.5.1.tar.gz", hash = "sha256:f0018613c7a0d19df3240c2a913849786f21b6539b9f23d85ce4067489dfacfa"}, 878 | ] 879 | pyflakes = [ 880 | {file = "pyflakes-2.2.0-py2.py3-none-any.whl", hash = "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92"}, 881 | {file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"}, 882 | ] 883 | pygments = [ 884 | {file = "Pygments-2.6.1-py3-none-any.whl", hash = "sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"}, 885 | {file = "Pygments-2.6.1.tar.gz", hash = "sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44"}, 886 | ] 887 | pyparsing = [ 888 | {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, 889 | {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, 890 | ] 891 | pytest = [ 892 | {file = "pytest-5.4.2-py3-none-any.whl", hash = "sha256:95c710d0a72d91c13fae35dce195633c929c3792f54125919847fdcdf7caa0d3"}, 893 | {file = "pytest-5.4.2.tar.gz", hash = "sha256:eb2b5e935f6a019317e455b6da83dd8650ac9ffd2ee73a7b657a30873d67a698"}, 894 | ] 895 | pytest-clarity = [ 896 | {file = "pytest-clarity-0.3.0a0.tar.gz", hash = "sha256:5cc99e3d9b7969dfe17e5f6072d45a917c59d363b679686d3c958a1ded2e4dcf"}, 897 | ] 898 | pytz = [ 899 | {file = "pytz-2020.1-py2.py3-none-any.whl", hash = "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed"}, 900 | {file = "pytz-2020.1.tar.gz", hash = 
"sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"}, 901 | ] 902 | pyyaml = [ 903 | {file = "PyYAML-5.3.1-cp27-cp27m-win32.whl", hash = "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f"}, 904 | {file = "PyYAML-5.3.1-cp27-cp27m-win_amd64.whl", hash = "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76"}, 905 | {file = "PyYAML-5.3.1-cp35-cp35m-win32.whl", hash = "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2"}, 906 | {file = "PyYAML-5.3.1-cp35-cp35m-win_amd64.whl", hash = "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c"}, 907 | {file = "PyYAML-5.3.1-cp36-cp36m-win32.whl", hash = "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2"}, 908 | {file = "PyYAML-5.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648"}, 909 | {file = "PyYAML-5.3.1-cp37-cp37m-win32.whl", hash = "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"}, 910 | {file = "PyYAML-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf"}, 911 | {file = "PyYAML-5.3.1-cp38-cp38-win32.whl", hash = "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97"}, 912 | {file = "PyYAML-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee"}, 913 | {file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"}, 914 | ] 915 | regex = [ 916 | {file = "regex-2020.5.14-cp27-cp27m-win32.whl", hash = "sha256:e565569fc28e3ba3e475ec344d87ed3cd8ba2d575335359749298a0899fe122e"}, 917 | {file = "regex-2020.5.14-cp27-cp27m-win_amd64.whl", hash = "sha256:d466967ac8e45244b9dfe302bbe5e3337f8dc4dec8d7d10f5e950d83b140d33a"}, 918 | {file = "regex-2020.5.14-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:27ff7325b297fb6e5ebb70d10437592433601c423f5acf86e5bc1ee2919b9561"}, 919 | {file = "regex-2020.5.14-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ea55b80eb0d1c3f1d8d784264a6764f931e172480a2f1868f2536444c5f01e01"}, 920 | {file = "regex-2020.5.14-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:c9bce6e006fbe771a02bda468ec40ffccbf954803b470a0345ad39c603402577"}, 921 | {file = "regex-2020.5.14-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:d881c2e657c51d89f02ae4c21d9adbef76b8325fe4d5cf0e9ad62f850f3a98fd"}, 922 | {file = "regex-2020.5.14-cp36-cp36m-win32.whl", hash = "sha256:99568f00f7bf820c620f01721485cad230f3fb28f57d8fbf4a7967ec2e446994"}, 923 | {file = "regex-2020.5.14-cp36-cp36m-win_amd64.whl", hash = "sha256:70c14743320a68c5dac7fc5a0f685be63bc2024b062fe2aaccc4acc3d01b14a1"}, 924 | {file = "regex-2020.5.14-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a7c37f048ec3920783abab99f8f4036561a174f1314302ccfa4e9ad31cb00eb4"}, 925 | {file = "regex-2020.5.14-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:89d76ce33d3266173f5be80bd4efcbd5196cafc34100fdab814f9b228dee0fa4"}, 926 | {file = "regex-2020.5.14-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:51f17abbe973c7673a61863516bdc9c0ef467407a940f39501e786a07406699c"}, 927 | {file = "regex-2020.5.14-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:ce5cc53aa9fbbf6712e92c7cf268274eaff30f6bd12a0754e8133d85a8fb0f5f"}, 928 | {file = "regex-2020.5.14-cp37-cp37m-win32.whl", hash = "sha256:8044d1c085d49673aadb3d7dc20ef5cb5b030c7a4fa253a593dda2eab3059929"}, 929 | {file = "regex-2020.5.14-cp37-cp37m-win_amd64.whl", hash = "sha256:c2062c7d470751b648f1cacc3f54460aebfc261285f14bc6da49c6943bd48bdd"}, 930 | {file = "regex-2020.5.14-cp38-cp38-manylinux1_i686.whl", hash = "sha256:329ba35d711e3428db6b45a53b1b13a0a8ba07cbbcf10bbed291a7da45f106c3"}, 931 | {file = "regex-2020.5.14-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:579ea215c81d18da550b62ff97ee187b99f1b135fd894a13451e00986a080cad"}, 932 | {file = 
"regex-2020.5.14-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:3a9394197664e35566242686d84dfd264c07b20f93514e2e09d3c2b3ffdf78fe"}, 933 | {file = "regex-2020.5.14-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ce367d21f33e23a84fb83a641b3834dd7dd8e9318ad8ff677fbfae5915a239f7"}, 934 | {file = "regex-2020.5.14-cp38-cp38-win32.whl", hash = "sha256:1386e75c9d1574f6aa2e4eb5355374c8e55f9aac97e224a8a5a6abded0f9c927"}, 935 | {file = "regex-2020.5.14-cp38-cp38-win_amd64.whl", hash = "sha256:7e61be8a2900897803c293247ef87366d5df86bf701083b6c43119c7c6c99108"}, 936 | {file = "regex-2020.5.14.tar.gz", hash = "sha256:ce450ffbfec93821ab1fea94779a8440e10cf63819be6e176eb1973a6017aff5"}, 937 | ] 938 | requests = [ 939 | {file = "requests-2.23.0-py2.py3-none-any.whl", hash = "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee"}, 940 | {file = "requests-2.23.0.tar.gz", hash = "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6"}, 941 | ] 942 | six = [ 943 | {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, 944 | {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, 945 | ] 946 | snowballstemmer = [ 947 | {file = "snowballstemmer-2.0.0-py2.py3-none-any.whl", hash = "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0"}, 948 | {file = "snowballstemmer-2.0.0.tar.gz", hash = "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"}, 949 | ] 950 | sphinx = [ 951 | {file = "Sphinx-2.4.4-py3-none-any.whl", hash = "sha256:fc312670b56cb54920d6cc2ced455a22a547910de10b3142276495ced49231cb"}, 952 | {file = "Sphinx-2.4.4.tar.gz", hash = "sha256:b4c750d546ab6d7e05bdff6ac24db8ae3e8b8253a3569b754e445110a0a12b66"}, 953 | ] 954 | sphinx-autoapi = [ 955 | {file = "sphinx-autoapi-1.3.0.tar.gz", hash = "sha256:90ee5e312edaa204d7c0ce5312a8e56898d34620616662e1aec26e33ea84f758"}, 956 | {file = 
"sphinx_autoapi-1.3.0-py2.py3-none-any.whl", hash = "sha256:abba752d7de5fea52ebc79198e4a3798ce76b42cfd89e2b0a41b6068885704a6"}, 957 | ] 958 | sphinx-rst-builder = [ 959 | {file = "sphinx-rst-builder-0.0.3.tar.gz", hash = "sha256:d7bd18f8f94e08b1a2267e862759358cb56bb72f0ac00d1fb81e68dbcf646011"}, 960 | {file = "sphinx_rst_builder-0.0.3-py3-none-any.whl", hash = "sha256:67d53c54d879b0adbf0b747b05ed09db11061f4dca4e45d721da669165e421bf"}, 961 | ] 962 | sphinx-rtd-theme = [ 963 | {file = "sphinx_rtd_theme-0.4.3-py2.py3-none-any.whl", hash = "sha256:00cf895504a7895ee433807c62094cf1e95f065843bf3acd17037c3e9a2becd4"}, 964 | {file = "sphinx_rtd_theme-0.4.3.tar.gz", hash = "sha256:728607e34d60456d736cc7991fd236afb828b21b82f956c5ea75f94c8414040a"}, 965 | ] 966 | sphinxcontrib-applehelp = [ 967 | {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, 968 | {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, 969 | ] 970 | sphinxcontrib-devhelp = [ 971 | {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, 972 | {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, 973 | ] 974 | sphinxcontrib-htmlhelp = [ 975 | {file = "sphinxcontrib-htmlhelp-1.0.3.tar.gz", hash = "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"}, 976 | {file = "sphinxcontrib_htmlhelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f"}, 977 | ] 978 | sphinxcontrib-jsmath = [ 979 | {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, 980 | {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = 
"sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, 981 | ] 982 | sphinxcontrib-qthelp = [ 983 | {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, 984 | {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, 985 | ] 986 | sphinxcontrib-serializinghtml = [ 987 | {file = "sphinxcontrib-serializinghtml-1.1.4.tar.gz", hash = "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc"}, 988 | {file = "sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl", hash = "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"}, 989 | ] 990 | termcolor = [ 991 | {file = "termcolor-1.1.0.tar.gz", hash = "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"}, 992 | ] 993 | toml = [ 994 | {file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"}, 995 | {file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"}, 996 | ] 997 | tomlkit = [ 998 | {file = "tomlkit-0.5.11-py2.py3-none-any.whl", hash = "sha256:4e1bd6c9197d984528f9ff0cc9db667c317d8881288db50db20eeeb0f6b0380b"}, 999 | {file = "tomlkit-0.5.11.tar.gz", hash = "sha256:f044eda25647882e5ef22b43a1688fb6ab12af2fc50e8456cdfc751c873101cf"}, 1000 | ] 1001 | typed-ast = [ 1002 | {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, 1003 | {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"}, 1004 | {file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"}, 1005 | {file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = 
"sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"}, 1006 | {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, 1007 | {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, 1008 | {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, 1009 | {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, 1010 | {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, 1011 | {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, 1012 | {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, 1013 | {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, 1014 | {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, 1015 | {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, 1016 | {file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, 1017 | {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, 1018 | {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, 1019 | {file = 
"typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, 1020 | {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, 1021 | {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, 1022 | {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, 1023 | ] 1024 | typing-extensions = [ 1025 | {file = "typing_extensions-3.7.4.2-py2-none-any.whl", hash = "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392"}, 1026 | {file = "typing_extensions-3.7.4.2-py3-none-any.whl", hash = "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5"}, 1027 | {file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"}, 1028 | ] 1029 | unidecode = [ 1030 | {file = "Unidecode-1.1.1-py2.py3-none-any.whl", hash = "sha256:1d7a042116536098d05d599ef2b8616759f02985c85b4fef50c78a5aaf10822a"}, 1031 | {file = "Unidecode-1.1.1.tar.gz", hash = "sha256:2b6aab710c2a1647e928e36d69c21e76b453cd455f4e2621000e54b2a9b8cce8"}, 1032 | ] 1033 | urllib3 = [ 1034 | {file = "urllib3-1.25.9-py2.py3-none-any.whl", hash = "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115"}, 1035 | {file = "urllib3-1.25.9.tar.gz", hash = "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527"}, 1036 | ] 1037 | wcwidth = [ 1038 | {file = "wcwidth-0.1.9-py2.py3-none-any.whl", hash = "sha256:cafe2186b3c009a04067022ce1dcd79cb38d8d65ee4f4791b8888d6599d1bbe1"}, 1039 | {file = "wcwidth-0.1.9.tar.gz", hash = "sha256:ee73862862a156bf77ff92b09034fc4825dd3af9cf81bc5b360668d425f3c5f1"}, 1040 | ] 1041 | wrapt = [ 1042 | {file = "wrapt-1.12.1.tar.gz", hash = 
from typing import Callable, Optional, Type, TypeVar, Union, overload

from class_doc import extract_docs_from_cls_obj

from pydantic_settings.types import AnyPydanticModel, is_pydantic_dataclass


def apply_attributes_docs(
    model: Type[AnyPydanticModel], *, override_existing: bool = True
) -> None:
    """
    Apply model attributes documentation in-place.  Resulting docs are
    placed inside :code:`field.schema.description` for each *pydantic*
    model field.

    :param model: any pydantic model
    :param override_existing: override existing descriptions
    """
    # A pydantic dataclass wraps a plain BaseModel; recurse into the
    # wrapped model instead of processing the dataclass itself.
    if is_pydantic_dataclass(model):
        apply_attributes_docs(
            model.__pydantic_model__, override_existing=override_existing
        )
        return

    attr_docs = extract_docs_from_cls_obj(model)

    for field in model.__fields__.values():
        keep_existing = (
            field.field_info.description and not override_existing
        )
        if keep_existing:
            continue

        doc_lines = attr_docs.get(field.name)
        if doc_lines is not None:
            field.field_info.description = '\n'.join(doc_lines)


MC = TypeVar('MC', bound=AnyPydanticModel)
_MC = TypeVar('_MC', bound=AnyPydanticModel)


@overload
def with_attrs_docs(model_cls: Type[MC]) -> Type[MC]:
    ...


@overload
def with_attrs_docs(
    *, override_existing: bool = True
) -> Callable[[Type[MC]], Type[MC]]:
    ...


def with_attrs_docs(
    model_cls: Optional[Type[MC]] = None, *, override_existing: bool = True
) -> Union[Callable[[Type[MC]], Type[MC]], Type[MC]]:
    """
    Class decorator applying :py:func:`.apply_attributes_docs`.

    Usable both bare (:code:`@with_attrs_docs`) and with keyword
    arguments (:code:`@with_attrs_docs(override_existing=False)`).
    """

    def decorator(maybe_model_cls: Type[_MC]) -> Type[_MC]:
        apply_attributes_docs(
            maybe_model_cls, override_existing=override_existing
        )
        return maybe_model_cls

    # Bare usage decorates immediately; parametrized usage returns the
    # decorator to be applied later.
    return decorator if model_cls is None else decorator(model_cls)
    # Restorer translating flat env-var mappings back into the nested
    # model shape; populated per-subclass by __init_subclass__ below.
    shape_restorer: ClassVar[ModelShapeRestorer]

    def __init_subclass__(cls, **kwargs):
        # Build the shape restorer for every subclass from its Config
        # namespace, and optionally copy attribute docstrings into the
        # pydantic field descriptions (see apply_attributes_docs).
        config = cast(cls.Config, cls.__config__)
        cls.shape_restorer = ModelShapeRestorer(
            cls,
            config.env_prefix,
            config.env_case_sensitive,
            config.complex_inline_values_decoder,
        )
        if config.build_attr_docs:
            apply_attributes_docs(
                cls, override_existing=config.override_exited_attrs_docs
            )

    @classmethod
    def from_env(
        cls: Type[T],
        environ: Mapping[str, str],
        *,
        ignore_restore_errs: bool = True,
        **values: Any
    ) -> T:
        """
        Build model instance from given values and environ.

        :param environ: environment-like flat mapping, take precedence over
            values
        :param ignore_restore_errs: ignore errors happened while restoring
            flat-mapping
        :param values: values
        :raises ValidationError: in case of failure
        :return: model instance
        """
        # Restore nested values from the flat environ mapping; restore
        # errors are collected rather than raised so they can be merged
        # with validation errors below.
        env_vars_applied, env_apply_errs = cls.shape_restorer.restore(environ)
        validation_err = None
        try:
            # Env-derived values win over explicitly passed `values`.
            res = cls(**deep_merge_mappings(env_vars_applied, values))
        except ValidationError as err:
            res = None
            validation_err = err

        if len(env_apply_errs) > 0 and not ignore_restore_errs:
            # Wrap each restore error so it carries both the model
            # location and the originating env-var key.
            env_errs_as_ew = [
                ExtendedErrorWrapper(
                    env_err.__cause__ or env_err,
                    loc=tuple(env_err.loc),
                    source_loc=(env_err.key, None),
                )
                for env_err in env_apply_errs
            ]
            if validation_err is not None:
                # NOTE(review): appends to pydantic v1
                # ValidationError.raw_errors (a mutable list) — confirm
                # this stays valid if pydantic is upgraded.
                validation_err.raw_errors += env_errs_as_ew
            else:
                validation_err = ValidationError(env_errs_as_ew, cls)

        if validation_err:
            # Re-raise with error locations remapped onto the restored
            # env-var sources for better messages.
            raise with_errs_locations(cls, validation_err, env_vars_applied)

        return res
from typing import Callable, TextIO, Union

from attr import dataclass

from .common import (  # noqa: F401
    ListExpectError,
    LocationLookupError,
    MappingExpectError,
    ParsingError,
    TextValues,
)


@dataclass
class DecoderMeta:
    """Decoder metadata: common name plus the document values loader."""

    name: str
    values_loader: Callable[[Union[str, TextIO]], TextValues]


def _get_json() -> DecoderMeta:
    """Lazily import and describe the JSON decoder."""
    from .json import decode_document

    return DecoderMeta('json', decode_document)


def _get_yaml() -> DecoderMeta:
    """Lazily import and describe the YAML decoder."""
    from .yaml import decode_document

    return DecoderMeta('yaml', decode_document)


def _get_toml() -> DecoderMeta:
    """Placeholder for the not-yet-implemented TOML decoder."""
    raise NotImplementedError('TOML decoder still not implemented')


class DecoderNotFoundError(TypeError):
    """Error for cases when requested decoder not found"""


class DecoderMissingRequirementError(DecoderNotFoundError):
    """Error for cases when requested decoder requirement is missing"""


def _guard_import_error(
    decoder_loader: Callable[[], DecoderMeta], decoder_type: str
) -> DecoderMeta:
    """
    Call *decoder_loader*, translating a missing optional dependency
    into :py:class:`DecoderMissingRequirementError`.

    :param decoder_loader: zero-argument factory performing the lazy import
    :param decoder_type: human-readable decoder name used in the message
    """
    try:
        return decoder_loader()
    except ImportError as err:
        # FIX: the original formatted ``err.path``, which is ``None`` for
        # a module that failed to import — ``err.name`` holds the missing
        # module's name.  Message grammar fixed as well.
        raise DecoderMissingRequirementError(
            f'"{decoder_type}" is not supported because '
            f'"{err.name}" is not installed'
        ) from err


def get_decoder(decoder_type: str) -> DecoderMeta:
    """
    Get decoder for given type-hint. Import decoders lazily to make
    dependencies "soft-wired".

    :param decoder_type: any kind of decoder hint: file extension,
        mime-type or common name
    :raises DecoderNotFoundError: no decoder matches the given hint
    :raises DecoderMissingRequirementError: decoder matched but its
        optional dependency is not installed
    :return: decoder metadata
    """
    if decoder_type in ('json', '.json', 'application/json'):
        return _guard_import_error(_get_json, 'json')
    if decoder_type in (
        'yaml',
        'yml',
        '.yaml',
        '.yml',
        'text/x-yaml',
        # FIX: correct MIME type added; the historical misspelling below
        # is kept for backward compatibility with existing callers.
        'application/x-yaml',
        'applicaiton/x-yaml',
        'text/yaml',
        'application/yaml',
    ):
        return _guard_import_error(_get_yaml, 'yaml')
    if decoder_type in ('.toml', 'toml', 'text/toml'):
        return _guard_import_error(_get_toml, 'toml')
    raise DecoderNotFoundError(f'Loader "{decoder_type}" isn\'t supported')


# --- pydantic_settings/decoder/common.py (location-aware value types) ---

from typing import Dict, Optional  # noqa: E402

from pydantic_settings.types import (  # noqa: E402
    Json,
    JsonLocation,
    SourceValueLocationProvider,
    TextLocation,
)


@dataclass
class LocationLookupError(ValueError):
    """A JSON key path could not be resolved within the parsed document."""

    key: JsonLocation
    part_pos: int  # index of the key part where the lookup failed

    def __attrs_post_init__(self):
        # Mirror attrs fields into BaseException.args for pickling/str().
        self.args = (self.key, self.part_pos)

    def __repr__(self) -> str:
        key_repr = ']['.join(repr(part) for part in self.key)
        return f"Requested key {key_repr} can't be found within document"


class MappingExpectError(LocationLookupError):
    """A mapping was expected at some step of the key path."""


class ListExpectError(LocationLookupError):
    """A list was expected at some step of the key path."""


class TextValues(Dict[str, Json]):
    """Decoded document values plus a text-location lookup facility."""

    __slots__ = ('location_finder',)

    def __init__(
        self, finder: SourceValueLocationProvider[TextLocation], **values: Json
    ):
        super().__init__(**values)
        self.location_finder = finder

    def get_location(self, val_loc: JsonLocation) -> TextLocation:
        """Return the source-text location of the value at *val_loc*."""
        return self.location_finder.get_location(val_loc)
import copy
import json
import json.scanner
from functools import partial, wraps
from typing import Any, Callable, Dict, List, TextIO, Tuple, Union

from attr import dataclass

from pydantic_settings.types import Json, JsonLocation, TextLocation

from .common import (
    ListExpectError,
    LocationLookupError,
    MappingExpectError,
    ParsingError,
    TextValues,
)


def _rfind(s: str, sym: str, start: int = None, end: int = None) -> int:
    # Thin wrapper over str.rfind; returns -1 when `sym` is absent, which
    # the column computations below rely on (offset is then from pos 0).
    res = s.rfind(sym, start, end)
    return res


def _create_object_hook(original_value):
    # Wrap a json.decoder parse callback so that instead of a plain value
    # it returns an ASTItem annotated with line/column/offset positions.
    @wraps(original_value)
    def routine(
        s_with_end: Tuple[str, int], *args: Any, **kwargs: Any
    ) -> Tuple[ASTItem, int]:
        s, end = s_with_end
        value, new_end = original_value(s_with_end, *args, **kwargs)

        # Column = distance from the last newline before the offset
        # (1-based for start, exclusive for end).
        col = end - _rfind(s, '\n', None, end - 1) - 1
        end_col = new_end - _rfind(s, '\n', None, new_end - 1)

        return (
            ASTItem(
                location=TextLocation(
                    line=s.count('\n', None, end) + 1,
                    col=col,
                    end_line=s.count('\n', None, new_end) + 1,
                    end_col=end_col,
                    pos=end,
                    end_pos=new_end,
                ),
                value=value,
            ),
            new_end,
        )

    return routine


@dataclass
class ASTItem:
    """A decoded JSON value together with its location in the source text."""

    location: TextLocation
    value: 'AstJsonLike'

    @classmethod
    def create(
        cls,
        line: int,
        col: int,
        end_line: int,
        end_col: int,
        val: 'AstJsonLike',
        pos: int = None,
        end_pos: int = None,
    ) -> 'ASTItem':
        """Convenience constructor; missing offsets default to 0."""
        return ASTItem(
            TextLocation(line, col, end_line, end_col, pos or 0, end_pos or 0),
            val,
        )

    def get_json_value(self) -> Json:
        """Recursively strip location info, returning plain JSON values."""
        if isinstance(self.value, list):
            return [child.get_json_value() for child in self.value]
        if isinstance(self.value, dict):
            return {
                key: child.get_json_value()
                for key, child in self.value.items()
            }
        return self.value


AstJsonLike = Union[None, float, int, str, List[ASTItem], Dict[str, ASTItem]]


def _create_scanner_wrapper(
    scanner: Callable[[str, int], Tuple[Json, int]]
) -> Callable[[str, int], Tuple[ASTItem, int]]:
    # Wrap the internal `_scan_once` so scalar constants (null, bool,
    # int, float) also come back as located ASTItems; containers and
    # strings are already wrapped by the object hooks above.
    @wraps(scanner)
    def wrapper(s: str, idx: int) -> Tuple[ASTItem, int]:
        val, end = scanner(s, idx)
        if isinstance(val, ASTItem):
            return val, end

        line = s.count('\n', None, idx) + 1
        newline_from_left = _rfind(s, '\n', None, idx)
        col = idx - newline_from_left
        end_col = end - newline_from_left

        # Anything else means a parse callback was not patched as
        # expected — fail loudly rather than emit an unlocated value.
        is_supported = val is None or isinstance(val, (int, float, bool))
        if not is_supported:
            raise ValueError(
                f'unexpected value has been returned from scanner: '
                f'"{val}" of type {type(val)}'
            )

        return (
            ASTItem(TextLocation(line, col, line, end_col, idx, end), val),
            end,
        )

    return wrapper


class ASTDecoder(json.JSONDecoder):
    """JSONDecoder producing an ASTItem tree with source locations."""

    def __init__(self):
        from json.decoder import JSONArray, JSONObject, scanstring

        super().__init__()

        self.parse_object = _create_object_hook(JSONObject)
        self.parse_array = _create_object_hook(JSONArray)
        str_parser_wrapper = _create_object_hook(
            lambda s_with_end, strict: scanstring(*s_with_end, strict)
        )
        self.parse_string = lambda s, end, strict: str_parser_wrapper(
            (s, end), strict
        )

        # Here I'm patching the scanner closure, because it internally
        # refers to itself and isn't configurable.
        # Schema is: 'py_make_scanner' defines '_scan_once', which is
        # referred by 'scan_once' which is the result of the
        # 'py_make_scanner()' expression.
        orig_scanner = copy.deepcopy(json.scanner.py_make_scanner(self))
        try:
            cell = next(
                cell
                for cell in orig_scanner.__closure__
                if callable(cell.cell_contents)
                and cell.cell_contents.__name__ == '_scan_once'
            )
        except StopIteration:
            raise ValueError(
                f'Failed to path {orig_scanner.__name__}, '
                f'probably the internals has been changed'
            )

        self.scan_once = _create_scanner_wrapper(cell.cell_contents)
        # Function closure cells are read-only before Python 3.7; here
        # using one workaround approach found on the internet.
        _cell_set(cell, self.scan_once)


load = partial(json.load, cls=ASTDecoder)
loads = partial(json.loads, cls=ASTDecoder)


def decode_document(content: Union[str, TextIO]) -> TextValues:
    """
    Decode a JSON document (string or text stream) into TextValues,
    which expose source-text locations for every value.

    :raises ParsingError: on invalid JSON or a non-mapping root
    """
    try:
        if isinstance(content, str):
            tree = loads(content)
        else:
            tree = load(content)
    except json.JSONDecodeError as err:
        # End position is unknown at this point, hence the -1 markers.
        raise ParsingError(
            err, TextLocation(err.lineno, err.colno, -1, -1, err.pos, -1)
        )

    if not isinstance(tree.value, dict):
        raise ParsingError(
            ValueError('document root item must be a mapping'), None
        )

    return TextValues(_LocationFinder(tree), **tree.get_json_value())


class _LocationFinder:
    """Resolves a JSON key path to its text location within the AST."""

    def __init__(self, root_item: ASTItem):
        self.root_item = root_item

    def get_location(self, key: JsonLocation) -> TextLocation:
        try:
            return self._get_location(key)
        except LocationLookupError as err:
            # in case of this error __cause__ field will be populated
            # with more specific error, so it might be helpful during
            # debugging
            raise KeyError(key) from err

    def _get_location(self, key: JsonLocation) -> TextLocation:
        # Walk the AST part by part, validating the container kind
        # expected by each key part (int -> list, str -> mapping).
        curr_item = self.root_item
        for i, key_part in enumerate(key):
            if isinstance(key_part, int) and not isinstance(
                curr_item.value, list
            ):
                raise ListExpectError(key, i)
            elif isinstance(key_part, str) and not isinstance(
                curr_item.value, dict
            ):
                raise MappingExpectError(key, i)

            try:
                curr_item = curr_item.value[key_part]
            except (KeyError, IndexError):
                raise LocationLookupError(key, i)

        return curr_item.location


def _make_cell_set_template_code():
    """
    This module was extracted from the `cloud` package, developed by
    PiCloud, Inc.

    Copyright (c) 2015, Cloudpickle contributors.
    Copyright (c) 2012, Regents of the University of California.
    Copyright (c) 2009 PiCloud, Inc. http://www.picloud.com.
    All rights reserved.

    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions
    are met:
        * Redistributions of source code must retain the above copyright
          notice, this list of conditions and the following disclaimer.
        * Redistributions in binary form must reproduce the above copyright
          notice, this list of conditions and the following disclaimer in the
          documentation and/or other materials provided with the distribution.
        * Neither the name of the University of California, Berkeley nor the
          names of its contributors may be used to endorse or promote
          products derived from this software without specific prior written
          permission.

    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
    HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
    TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
    SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

    Docs stripped, borrowed from here
    https://github.com/cloudpipe/cloudpickle/pull/90/files#diff-d2a3618afedd4e124c532151eedbae09R74
    """
    import types

    def inner(value):
        lambda: cell  # make ``cell`` a closure so that we get a STORE_DEREF
        cell = value

    co = inner.__code__

    # NOTE: we are marking the cell variable as a free variable intentionally
    # so that we simulate an inner function instead of the outer function. This
    # is what gives us the ``nonlocal`` behavior in a Python 2 compatible way.
    return types.CodeType(
        co.co_argcount,
        co.co_kwonlyargcount,
        co.co_nlocals,
        co.co_stacksize,
        co.co_flags,
        co.co_code,
        co.co_consts,
        co.co_names,
        co.co_varnames,
        co.co_filename,
        co.co_name,
        co.co_firstlineno,
        co.co_lnotab,
        co.co_cellvars,  # this is the trickery
        (),
    )


def _cell_set(cell, value):
    """
    Set the value of a closure cell.
    """
    import sys
    import types

    # Before 3.7 cell_contents is read-only, so a code-object trick is
    # needed; from 3.7 on the attribute is directly writable.
    if sys.version_info < (3, 7):
        cell_set_template_code = _make_cell_set_template_code()
        return types.FunctionType(
            cell_set_template_code, {}, '_cell_set_inner', (), (cell,)
        )(value)
    else:
        cell.cell_contents = value


# check that json module is compatible with our implementation
def _check_capability():
    ASTDecoder()


_check_capability()
class _LocationFinder:
    """
    Maps JSON-style document paths onto text locations using the YAML node
    tree (each node carries ``start_mark``/``end_mark`` from the parser).
    """

    def __init__(self, root_node: yaml.Node):
        self._node = root_node

    def get_location(self, key: JsonLocation) -> TextLocation:
        """
        Return the text location of the value at *key*.

        :param key: sequence of mapping keys and sequence indexes
        :raises KeyError: if the path cannot be resolved
        """
        try:
            node = self._lookup_node_by_loc(key)
        except LocationLookupError as err:
            raise KeyError(key) from err

        # YAML marks are zero-based, TextLocation lines/columns are one-based
        return TextLocation(
            node.start_mark.line + 1,
            node.start_mark.column + 1,
            node.end_mark.line + 1,
            node.end_mark.column + 1,
            node.start_mark.pointer,
            node.end_mark.pointer,
        )

    def _lookup_node_by_loc(self, key: JsonLocation) -> yaml.Node:
        curr_node = self._node
        if curr_node is None:
            raise LocationLookupError(key, -1)

        for part_num, key_part in enumerate(key):
            new_node = curr_node
            if not isinstance(curr_node, yaml.CollectionNode):
                raise LocationLookupError(key, part_num)
            if isinstance(key_part, str):
                if not isinstance(curr_node, yaml.MappingNode):
                    raise MappingExpectError(key, part_num)

                # last match wins, mirroring YAML duplicate-key semantics
                for key_node, value_node in curr_node.value:
                    if key_node.value == key_part:
                        new_node = value_node
            else:
                if not isinstance(curr_node, yaml.SequenceNode):
                    raise ListExpectError(key, part_num)

                # BUGFIX: the bounds check used to be inverted
                # ("len(curr_node.value) < key_part"), which made every
                # valid index unresolvable and let out-of-range indexes
                # escape as an uncaught IndexError
                if 0 <= key_part < len(curr_node.value):
                    new_node = curr_node.value[key_part]

            if new_node is curr_node:
                raise LocationLookupError(key, part_num)
            curr_node = new_node

        return curr_node
except yaml.YAMLError as err: 81 | if not isinstance(err, yaml.MarkedYAMLError): 82 | loc = None 83 | else: 84 | loc = TextLocation( 85 | err.problem_mark.line + 1, 86 | err.problem_mark.column + 1, 87 | -1, 88 | -1, 89 | err.problem_mark.pointer, 90 | -1, 91 | ) 92 | 93 | raise ParsingError(err, loc) 94 | 95 | if values is None: 96 | values = {} 97 | if not isinstance(values, dict): 98 | raise ParsingError( 99 | ValueError('document root item must be a mapping'), None 100 | ) 101 | 102 | return TextValues(_LocationFinder(root_node), **values) 103 | -------------------------------------------------------------------------------- /pydantic_settings/errors.py: -------------------------------------------------------------------------------- 1 | from dataclasses import asdict 2 | from pathlib import Path 3 | from typing import ( 4 | Any, 5 | Dict, 6 | Iterator, 7 | List, 8 | Optional, 9 | Sequence, 10 | Tuple, 11 | Type, 12 | cast, 13 | ) 14 | 15 | from pydantic import BaseConfig, BaseModel, ValidationError 16 | from pydantic.error_wrappers import ErrorWrapper, error_dict 17 | 18 | from pydantic_settings.decoder import DecoderMeta 19 | from pydantic_settings.types import ( 20 | AnySourceLocation, 21 | AnySourceLocProvider, 22 | Json, 23 | JsonDict, 24 | JsonLocation, 25 | TextLocation, 26 | ) 27 | 28 | 29 | class LoadingError(ValueError): 30 | """General loading error.""" 31 | 32 | file_path: Optional[Path] 33 | """Source file path, or none if in-memory string used""" 34 | 35 | cause: Optional[Exception] 36 | """Cause of error (used instead of :py:attr:`__cause__` attribute)""" 37 | 38 | msg: Optional[str] 39 | """Optional error message""" 40 | 41 | def __init__( 42 | self, 43 | file_path: Optional[Path], 44 | cause: Exception = None, 45 | msg: str = None, 46 | ): 47 | self.args = self.file_path, self.cause, self.msg = ( 48 | file_path, 49 | cause, 50 | msg, 51 | ) 52 | 53 | def render_error(self) -> str: 54 | """ 55 | Render error as a human-readable text 56 | 57 | 
class LoadingParseError(LoadingError):
    """Parsing error occurred while loading settings content."""

    def __init__(
        self,
        *args: Any,
        decoder: DecoderMeta = None,
        location: TextLocation = None,
        **kwargs,
    ):
        """
        :param decoder: decoder that failed to parse the content, if known
        :param location: text location of the parse failure, if known
        """
        super().__init__(*args, **kwargs)
        self.location = location
        self.decoder = decoder

    def render_error(self) -> str:
        # "decoder" defaults to None, so it must not be dereferenced
        # unconditionally: doing so raised AttributeError instead of
        # producing an error message
        decoder_part = (
            f'using "{self.decoder.name} loader": '
            if self.decoder is not None
            else ''
        )
        return (
            f'parsing error occurs while loading settings from '
            f'{_render_err_file_path(self.file_path)} '
            f'{decoder_part}{str(self.cause)}'
        )
123 | """ 124 | 125 | __slots__ = ('source_loc',) 126 | 127 | source_loc: AnySourceLocation 128 | """ 129 | Describes source location, corresponding to 130 | :py:attr:`pydantic.ErrorWrapper.loc`. 131 | """ 132 | 133 | def __init__( 134 | self, 135 | exc: Exception, 136 | loc: JsonLocation, 137 | source_loc: AnySourceLocation = None, 138 | ): 139 | super().__init__(exc, tuple(loc)) 140 | self.source_loc = source_loc 141 | 142 | def __repr_args__(self) -> Sequence[Tuple[Optional[str], Any]]: 143 | return list(super().__repr_args__()) + [ 144 | ('source_loc', self.source_loc) 145 | ] 146 | 147 | 148 | def _flatten_errors_wrappers( 149 | errors: Sequence[Any], *, loc: Optional[JsonLocation] = None 150 | ) -> Iterator[Tuple[JsonLocation, ErrorWrapper]]: 151 | """Iterate through ValidationError error wrappers and reduce nesting.""" 152 | if loc is None: 153 | loc = () 154 | for error in errors: 155 | if isinstance(error, ErrorWrapper): 156 | error_loc = tuple(loc) + error.loc_tuple() 157 | if isinstance(error.exc, ValidationError): 158 | yield from _flatten_errors_wrappers( 159 | error.exc.raw_errors, loc=error_loc 160 | ) 161 | else: 162 | yield error_loc, error 163 | else: 164 | raise RuntimeError(f'Unknown error object: {error}') 165 | 166 | 167 | def with_errs_locations( 168 | model: Type[BaseModel], 169 | validation_err: ValidationError, 170 | values_source: AnySourceLocProvider, 171 | ) -> ValidationError: 172 | def process_err_wrapper( 173 | err_wrapper: ErrorWrapper, loc_override: JsonLocation 174 | ) -> ErrorWrapper: 175 | try: 176 | location = values_source.get_location(loc_override) 177 | except KeyError: 178 | if isinstance(err_wrapper, ExtendedErrorWrapper): 179 | return ExtendedErrorWrapper( 180 | err_wrapper.exc, loc_override, err_wrapper.source_loc 181 | ) 182 | else: 183 | return ErrorWrapper(err_wrapper.exc, tuple(loc_override)) 184 | 185 | return ExtendedErrorWrapper( 186 | err_wrapper.exc, loc_override, source_loc=location 187 | ) 188 | 189 | return 
ValidationError( 190 | [ 191 | process_err_wrapper(raw_err, model_loc) 192 | for model_loc, raw_err in _flatten_errors_wrappers( 193 | validation_err.raw_errors 194 | ) 195 | ], 196 | model, 197 | ) 198 | 199 | 200 | def serialize_errors( 201 | err: ValidationError, config: Type[BaseConfig] 202 | ) -> List[Json]: 203 | return [ 204 | _ext_error_dict(err_wrapper, config) for err_wrapper in err.raw_errors 205 | ] 206 | 207 | 208 | def render_validation_error(error: LoadingValidationError) -> str: 209 | if issubclass(error.model, BaseModel): 210 | config = error.model.__config__ 211 | else: 212 | config = error.model.__pydantic_model__.__config__ 213 | 214 | errors = list(_flatten_errors_wrappers(error.raw_errors)) 215 | errors_num = len(errors) 216 | 217 | rendered_errors = '\n'.join( 218 | _render_raw_error(raw_err, model_loc, config) 219 | for model_loc, raw_err in errors 220 | ) 221 | env_used = any( 222 | not isinstance(raw_err.source_loc, TextLocation) 223 | for _, raw_err in errors 224 | if isinstance(raw_err, ExtendedErrorWrapper) 225 | ) 226 | 227 | return ( 228 | f'{errors_num} validation error{"" if errors_num == 1 else "s"} ' 229 | f'for {error.model.__name__} ' 230 | f'({_render_err_file_path(error.file_path)}' 231 | f"{' and environment variables' if env_used else ''}" 232 | f'):\n{rendered_errors}' 233 | ) 234 | 235 | 236 | def _render_err_file_path(file_path: Path) -> str: 237 | if file_path is not None: 238 | return f'configuration file at "{file_path}"' 239 | else: 240 | return 'in-memory buffer' 241 | 242 | 243 | def _render_raw_error( 244 | raw_err: ErrorWrapper, loc_override: JsonLocation, config: Type[BaseConfig] 245 | ) -> str: 246 | serialized_err = cast( 247 | JsonDict, error_dict(raw_err.exc, config, tuple(loc_override)) 248 | ) 249 | return ( 250 | f'{_render_err_loc(raw_err, loc_override)}\n' 251 | f'\t{serialized_err["msg"]} ' 252 | f'({_render_error_type_and_ctx(serialized_err)})' 253 | ) 254 | 255 | 256 | def 
_render_error_type_and_ctx(error: JsonDict) -> str: 257 | t = 'type=' + error['type'] 258 | ctx = cast(JsonDict, error.get('ctx')) 259 | if ctx: 260 | return t + ''.join(f'; {k}={v}' for k, v in ctx.items()) 261 | else: 262 | return t 263 | 264 | 265 | def _render_err_loc(raw_err: ErrorWrapper, loc_override: JsonLocation) -> str: 266 | model_loc = ' -> '.join(str(loc) for loc in loc_override) 267 | if isinstance(raw_err, ExtendedErrorWrapper): 268 | if not isinstance(raw_err.source_loc, TextLocation): 269 | env_name, text_loc = raw_err.source_loc 270 | from_loc = f' from env "{env_name}"' 271 | if text_loc is not None: 272 | from_loc += f' at {text_loc.pos}:{text_loc.end_pos}' 273 | else: 274 | source_loc = raw_err.source_loc 275 | from_loc = f' from file at {source_loc.line}:{source_loc.col}' 276 | 277 | return model_loc + from_loc 278 | 279 | return model_loc 280 | 281 | 282 | def _serialize_source_loc(loc: AnySourceLocation) -> Json: 283 | if isinstance(loc, TextLocation): 284 | return asdict(loc) 285 | 286 | assert ( 287 | isinstance(loc, Sequence) 288 | and len(loc) == 2 289 | and (loc[1] is None or isinstance(loc[1], TextLocation)) 290 | ) 291 | 292 | env_name, text_loc = loc 293 | return [env_name, asdict(text_loc) if text_loc is not None else None] 294 | 295 | 296 | def _ext_error_dict( 297 | err_wrapper: ErrorWrapper, config: Type[BaseConfig] 298 | ) -> JsonDict: 299 | res = cast( 300 | JsonDict, error_dict(err_wrapper.exc, config, err_wrapper.loc_tuple()) 301 | ) 302 | if isinstance(err_wrapper, ExtendedErrorWrapper): 303 | res['source_loc'] = _serialize_source_loc(err_wrapper.source_loc) 304 | 305 | return res 306 | -------------------------------------------------------------------------------- /pydantic_settings/load.py: -------------------------------------------------------------------------------- 1 | import json 2 | from io import StringIO 3 | from os import environ as os_environ 4 | from pathlib import Path 5 | from typing import ( 6 | Callable, 7 
def _resolve_content_arg(
    any_content: Union[TextIO, str, Path],
    type_hint: str,
    content_reader: Callable[[Path], str],
) -> Tuple[DecoderMeta, Optional[Path], str]:
    """
    Normalize the polymorphic content argument of :py:func:`.load_settings`.

    :param any_content: file path, plain text or file-like object
    :param type_hint: explicit decoder name; takes precedence over the file
        suffix and is required when content isn't given as a path
    :param content_reader: callable used to read text from a file path
    :raises LoadingError: if the content can't be read or no decoder matches
    :return: decoder metadata, source file path (``None`` for in-memory
        content) and the raw text content
    """

    def decoder_by_type_hint(file_path_: Path = None) -> DecoderMeta:
        try:
            return get_decoder(type_hint)
        except DecoderNotFoundError as err_:
            raise LoadingError(file_path_, err_)

    if isinstance(any_content, Path):
        file_path = any_content
        try:
            content = content_reader(file_path)
        except FileNotFoundError as err:
            raise LoadingError(file_path, err)

        try:
            return get_decoder(file_path.suffix), file_path, content
        except DecoderNotFoundError as err:
            if type_hint is None:
                # BUGFIX: the adjacent literals are concatenated, the first
                # one previously lacked a trailing space and rendered as
                # "cannot determine decoder fromfile suffix ..."
                raise LoadingError(
                    file_path,
                    err,
                    f'cannot determine decoder from '
                    f'file suffix "{file_path.suffix}"',
                )

            return decoder_by_type_hint(file_path), file_path, content
    else:
        if isinstance(any_content, StringIO):
            content = any_content.getvalue()
        elif isinstance(any_content, str):
            content = any_content
        else:
            content = any_content.read()

        if type_hint is None:
            raise LoadingError(
                None,
                None,
                f'"type_hint" argument is required if '
                f'content is not an instance of "{Path.__module__}.'
                f'{Path.__qualname__}" class',
            )

        return decoder_by_type_hint(), None, content
def load_settings(
    cls: Type[SettingsM],
    any_content: Union[None, TextIO, str, Path] = None,
    *,
    type_hint: str = None,
    load_env: bool = False,
    env_prefix: str = 'APP',
    environ: Mapping[str, str] = None,
    _content_reader: Callable[[Path], str] = Path.read_text,
) -> SettingsM:
    """
    Load setting from `any_content` and optionally merge with environment
    variables. Content loaded from file path, from file-like source or from
    plain text.

    You could omit `any_content` argument in case you want to load settings
    only from environment variables.

    :param cls: either :py:class:`BaseSettingsModel` or
        :py:class:`pydantic.BaseModel` subclass type. The result will be
        instance of a given type.
    :param any_content: content from which settings will be loaded
    :param type_hint: determines content decoder. Required, if content isn't
        provided as a file path. Takes precedence over actual file suffix.
    :param load_env: determines whether load environment variables or not
    :param env_prefix: determines prefix used to match model field with
        appropriate environment variable. *NOTE* if `cls` argument is
        subclass of :py:class:`BaseSettingsModel` then `env_prefix`
        argument will be ignored.
    :param environ: environment to use instead of `os.environ`.
    :raises LoadingError: in case if any error occurred while loading settings
    :return: instance of settings model, provided by `cls` argument
    """
    if any_content is None and not load_env:
        raise LoadingError(
            None, msg='no sources provided to load settings from'
        )

    decoder_desc: Optional[DecoderMeta] = None
    file_path: Optional[Path] = None
    content: Optional[str] = None

    if any_content is not None:
        decoder_desc, file_path, content = _resolve_content_arg(
            any_content, type_hint, _content_reader
        )

    # decode textual content into a JSON-like document, remembering value
    # locations for later error reporting
    document_content: Optional[JsonDict] = None
    file_values: Optional[TextValues] = None
    if content is not None:
        try:
            document_content = file_values = decoder_desc.values_loader(
                content
            )
        except ParsingError as err:
            raise LoadingParseError(
                file_path,
                err.cause,
                location=err.text_location,
                decoder=decoder_desc,
            )

    # prepare environment values
    env_values: Optional[FlatMapValues] = None
    if load_env:
        # TODO: ignore env vars restoration errors so far
        restorer = _get_shape_restorer(cls, env_prefix)
        # BUGFIX: an explicitly provided (even empty) environ mapping must
        # be honoured; the previous "environ or os_environ" silently fell
        # back to os.environ when an empty mapping was passed
        env_values, _ = restorer.restore(
            os_environ if environ is None else environ
        )

        if document_content is not None:
            # environment variables take precedence over file content
            document_content = deep_merge_mappings(
                env_values, document_content
            )
        else:
            document_content = env_values

    try:
        result = cls(**document_content)
    except ValidationError as err:
        assert len(err.raw_errors) > 0
        new_err = err

        # enrich validation errors with their source locations, first for
        # file-sourced values, then for env-sourced ones
        if file_values is not None:
            new_err = with_errs_locations(cls, err, file_values)
        if env_values is not None:
            new_err = with_errs_locations(cls, new_err, env_values)

        raise LoadingValidationError(
            new_err.raw_errors, cls, file_path
        ) from err

    return result
-------------------------------------------------------------------------------- /pydantic_settings/restorer.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | from dataclasses import is_dataclass 3 | from functools import reduce 4 | from typing import ( 5 | Any, 6 | Callable, 7 | Dict, 8 | Iterator, 9 | List, 10 | Mapping, 11 | NamedTuple, 12 | Optional, 13 | Sequence, 14 | Tuple, 15 | Type, 16 | Union, 17 | cast, 18 | ) 19 | 20 | from attr import has as is_attr_class 21 | from pydantic import BaseModel 22 | 23 | from pydantic_settings.decoder import ParsingError, TextValues 24 | from pydantic_settings.types import ( 25 | AnyModelType, 26 | FlatMapLocation, 27 | Json, 28 | JsonDict, 29 | JsonLocation, 30 | ) 31 | from pydantic_settings.utils import get_union_subtypes 32 | 33 | _RecStrDictValue = Union['_RecStrDict', str] 34 | _RecStrDict = Dict[str, _RecStrDictValue] 35 | 36 | 37 | class _FieldLocDescription(NamedTuple): 38 | path: Tuple[str, ...] 39 | is_complex: bool 40 | is_determined: bool 41 | 42 | 43 | def _noop(val: Any) -> Any: 44 | return val 45 | 46 | 47 | def _is_determined_complex_field(field_type: Any) -> bool: 48 | """ 49 | Is field of given type have determined set of fields. 
50 | """ 51 | return ( 52 | is_attr_class(field_type) 53 | or is_dataclass(field_type) 54 | or (isinstance(field_type, type) and issubclass(field_type, BaseModel)) 55 | ) 56 | 57 | 58 | def _estimate_field_complexity(field_type: Any) -> Tuple[bool, bool]: 59 | if _is_determined_complex_field(field_type): 60 | return True, True 61 | 62 | try: 63 | union_subtypes = get_union_subtypes(field_type) 64 | except TypeError: 65 | pass 66 | else: 67 | return reduce( 68 | lambda prev, item: (prev[0] or item, prev[1] and item), 69 | ( 70 | _is_determined_complex_field(subtype) 71 | for subtype in union_subtypes 72 | ), 73 | (False, True), 74 | ) 75 | 76 | return False, False 77 | 78 | 79 | def _list_fields(type_: Type) -> Iterator[Tuple[str, Type]]: 80 | try: 81 | union_subtypes = get_union_subtypes(type_) 82 | except TypeError: 83 | pass 84 | else: 85 | for subtype in union_subtypes: 86 | try: 87 | yield from _list_fields(subtype) 88 | except TypeError: 89 | pass 90 | return 91 | 92 | if isinstance(type_, type) and issubclass(type_, BaseModel): 93 | for field in type_.__fields__.values(): 94 | yield field.name, field.type_ 95 | elif is_attr_class(type_): 96 | for field in type_.__attrs_attrs__: 97 | yield field.name, field.type 98 | elif is_dataclass(type_): 99 | for field in type_.__dataclass_fields__.values(): 100 | yield field.name, field.type 101 | else: 102 | raise TypeError(f"{type_} value isn't a model type") 103 | 104 | 105 | def _traveler( 106 | model: Type[AnyModelType], 107 | prefix: str, 108 | loc: Tuple[str, ...], 109 | case_reducer: Callable[[str], str], 110 | ) -> Iterator[Tuple[str, _FieldLocDescription]]: 111 | for field_name, field_type in _list_fields(model): 112 | upper_field_name = case_reducer(field_name) 113 | new_prefix = f'{prefix}_{upper_field_name}' 114 | new_loc = loc + (field_name,) 115 | is_complex, is_only_complex = _estimate_field_complexity(field_type) 116 | yield new_prefix, _FieldLocDescription( 117 | new_loc, is_complex, is_only_complex 118 
class FlatMapValues(Dict[str, 'Json']):
    """
    Values restored from a flat mapping (e.g. environment variables) that
    remember which original key produced each restored value.
    """

    __slots__ = 'restored_env_values', 'restored_text_values'

    def __init__(
        self,
        restored_env_values: Dict['JsonLocation', str],
        restored_text_values: Dict[str, Union['TextValues', Dict]],
        **values: 'Json',
    ):
        super().__init__(**values)
        # model location -> original flat-mapping key
        self.restored_env_values = restored_env_values
        # nested dicts whose leaves are (original key, decoded text values)
        self.restored_text_values = restored_text_values

    def get_location(self, val_loc: 'JsonLocation') -> 'FlatMapLocation':
        """
        Maps model location to flat-mapping location, preserving original case

        :param val_loc: model location
        :raises KeyError: in case if such value hasn't been restored
        :return: flat-mapping location
        """
        try:
            return self._get_location(val_loc)
        except KeyError:
            # normalize to a KeyError carrying the full requested location
            raise KeyError(val_loc)

    def _get_location(self, val_loc: 'JsonLocation') -> 'FlatMapLocation':
        if val_loc in self.restored_env_values:
            return self.restored_env_values[val_loc], None

        remaining = iter(val_loc)
        leaf: Optional[Tuple[str, 'TextValues']] = None

        cursor = self.restored_text_values
        for step in remaining:
            cursor = cursor[step]
            if isinstance(cursor, tuple):
                leaf = cursor
                break

        if leaf is None:
            raise KeyError(val_loc)

        used_key, text_values = leaf
        # resolve the rest of the path inside the decoded text values
        return used_key, text_values.get_location(tuple(remaining))
Optional[Sequence[str]], key: str): 188 | self.loc = loc 189 | self.key = key 190 | 191 | 192 | class CannotParseValueError(InvalidAssignError): 193 | """Cannot parse value.""" 194 | 195 | 196 | class AssignBeyondSimpleValueError(InvalidAssignError): 197 | """Assigning value deeper then previous simple value is forbidden.""" 198 | 199 | 200 | class ModelShapeRestorer(object): 201 | """ 202 | Restores flat-mapping into JSON document of known shape. 203 | 204 | Currently, setting nested a value inside of any sequence isn't supported. 205 | """ 206 | 207 | def __init__( 208 | self, 209 | model: AnyModelType, 210 | prefix: str, 211 | case_sensitive: bool, 212 | dead_end_value_resolver: Callable[[str], TextValues], 213 | ): 214 | self._case_reducer = _noop if case_sensitive else str.casefold 215 | self._prefix = self._case_reducer(prefix) 216 | self._model_flat_map = _build_model_flat_map( 217 | model, self._prefix, self._case_reducer 218 | ) 219 | self._dead_end_resolver = dead_end_value_resolver 220 | 221 | @property 222 | def prefix(self) -> str: 223 | return self._prefix 224 | 225 | @prefix.setter 226 | def prefix(self, val: str): 227 | self._prefix = val 228 | 229 | def restore( 230 | self, flat_map: Mapping[str, str] 231 | ) -> Tuple['FlatMapValues', Optional[Sequence[InvalidAssignError]]]: 232 | errs: List[InvalidAssignError] = [] 233 | target: Dict[str, Json] = {} 234 | consumed_envs: Dict[JsonLocation, str] = {} 235 | 236 | def default_dict_factory(): 237 | return defaultdict(default_dict_factory) 238 | 239 | consumed_text_vals = default_dict_factory() 240 | 241 | for orig_key, val in flat_map.items(): 242 | key = self._case_reducer(orig_key) 243 | if not key.startswith(self._prefix): 244 | continue 245 | 246 | try: 247 | path, is_complex, is_only_complex = self._model_flat_map[key] 248 | except KeyError: 249 | continue 250 | 251 | if is_complex or is_only_complex: 252 | try: 253 | val = self._dead_end_resolver(val) 254 | assert isinstance( 255 | val, 
TextValues 256 | ), 'Check is correct decoder used' 257 | reduce( 258 | lambda curr, item: curr[item], 259 | path[:-1], 260 | consumed_text_vals, 261 | )[path[-1]] = (orig_key, val) 262 | except ParsingError as err: 263 | if is_only_complex: 264 | new_err = CannotParseValueError(path, orig_key) 265 | new_err.__cause__ = err.cause 266 | errs.append(new_err) 267 | continue 268 | 269 | try: 270 | _apply_path_value(target, path, orig_key, val) 271 | except InvalidAssignError as e: 272 | errs.append(e) 273 | else: 274 | consumed_envs[path] = orig_key 275 | 276 | return FlatMapValues(consumed_envs, consumed_text_vals, **target), errs 277 | 278 | 279 | def _apply_path_value( 280 | root: JsonDict, 281 | path: Sequence[str], 282 | orig_key: str, 283 | value: Union[str, JsonDict], 284 | ): 285 | curr_segment: Any = root 286 | for path_part in path[:-1]: 287 | if not isinstance(curr_segment, dict): 288 | raise AssignBeyondSimpleValueError(path, orig_key) 289 | try: 290 | next_segment = curr_segment[path_part] 291 | except KeyError: 292 | next_segment = {} 293 | curr_segment[path_part] = next_segment 294 | 295 | curr_segment = next_segment 296 | curr_segment[path[-1]] = value 297 | -------------------------------------------------------------------------------- /pydantic_settings/types.py: -------------------------------------------------------------------------------- 1 | from dataclasses import Field 2 | from typing import ( 3 | Any, 4 | Callable, 5 | ClassVar, 6 | Dict, 7 | Iterator, 8 | List, 9 | Optional, 10 | Sequence, 11 | Tuple, 12 | Type, 13 | TypeVar, 14 | Union, 15 | ) 16 | 17 | from attr import dataclass 18 | from pydantic import BaseModel 19 | from typing_extensions import Protocol, runtime_checkable 20 | 21 | Json = Union[float, int, str, 'JsonDict', 'JsonList'] 22 | JsonDict = Dict[str, Json] 23 | JsonList = List[Json] 24 | 25 | 26 | @runtime_checkable 27 | class DataclassProtocol(Protocol): 28 | __dataclass_fields__: ClassVar[Dict[str, Field]] 29 | 30 | 31 | 
@runtime_checkable
class PydanticDataclass(DataclassProtocol, Protocol):
    """Structural type for a dataclass that has been processed by pydantic."""

    # Flag pydantic sets once instance initialisation has completed.
    __initialised__: bool
    # Synthetic pydantic model backing validation of the dataclass.
    __pydantic_model__: ClassVar[BaseModel]

    @classmethod
    def __validate__(
        cls: Type['PydanticDataclass'], value: Any
    ) -> 'PydanticDataclass':
        raise NotImplementedError

    @classmethod
    def __get_validators__(cls) -> Iterator[Callable]:
        raise NotImplementedError


# "Any type pydantic itself can validate" vs "any model-like type, including
# plain (non-pydantic) dataclasses".
AnyPydanticModel = Type[Union[BaseModel, PydanticDataclass]]
AnyModelType = Type[Union[BaseModel, DataclassProtocol]]


def is_pydantic_dataclass(cls: Type) -> bool:
    # NOTE(review): relies on `isinstance` against a runtime-checkable
    # protocol, which only checks attribute *presence*; `__initialised__`
    # is usually set per-instance, so confirm this matches pydantic
    # dataclass classes as intended — TODO verify.
    return isinstance(cls, PydanticDataclass)


JsonLocation = Sequence[Union[str, int]]
"""
Sequence of indexes or keys, represents a path to reach the value inside some
:py:obj:`JsonDict`.
"""


@dataclass
class TextLocation:
    """
    Describes value occurrence inside a text.
    """

    line: int  # line of the value start (1-based)
    col: int  # column of the value start (1-based)
    end_line: int  # line where the value ends
    end_col: int  # column just past the value end

    pos: int  # absolute character offset of the value start
    end_pos: int  # absolute character offset of the value end


# Location of a value taken from a flat mapping: the source key plus, when
# the value was decoded from embedded text, the position inside that text.
FlatMapLocation = Tuple[str, Optional[TextLocation]]
AnySourceLocation = Union[FlatMapLocation, TextLocation]


SL = TypeVar('SL', contravariant=True)
87 | """ 88 | 89 | def get_location(self, val_loc: JsonLocation) -> SL: 90 | raise NotImplementedError 91 | 92 | 93 | AnySourceLocProvider = Union[ 94 | SourceValueLocationProvider[FlatMapLocation], 95 | SourceValueLocationProvider[TextLocation], 96 | ] 97 | -------------------------------------------------------------------------------- /pydantic_settings/utils.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Mapping, Tuple, Type, Union 2 | 3 | from pydantic_settings.types import Json 4 | 5 | _sentinel = object() 6 | 7 | 8 | def deep_merge_mappings( 9 | first_map: Mapping[str, Json], second_map: Mapping[str, Json] 10 | ) -> Dict[str, Json]: 11 | dst: Dict[str, Json] = {} 12 | keys = set(first_map).union(set(second_map)) 13 | for key in keys: 14 | first_val = first_map.get(key, _sentinel) 15 | second_val = second_map.get(key, _sentinel) 16 | 17 | assert first_val is not _sentinel or second_val is not _sentinel 18 | 19 | if first_val is _sentinel and second_val is not _sentinel: 20 | val = second_val 21 | else: 22 | val = first_val 23 | 24 | if isinstance(first_val, Mapping) and isinstance(second_val, Mapping): 25 | val = deep_merge_mappings(first_val, second_val) 26 | 27 | dst[key] = val 28 | 29 | return dst 30 | 31 | 32 | def get_generic_info(t: Type) -> Tuple[Type, Tuple[Type, ...]]: 33 | try: 34 | return t.__origin__, t.__args__ 35 | except AttributeError: 36 | raise TypeError(f'{t} is not a generic class') 37 | 38 | 39 | def get_union_subtypes(t: Type) -> Tuple[Type, ...]: 40 | origin, args = get_generic_info(t) 41 | if origin is not Union: 42 | raise TypeError(f'{t} is not typing.Union') 43 | return args 44 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "pydantic-settings" 3 | version = "0.2.0" 4 | description = "Hipster-orgazmic tool 
to manage application settings"
from dataclasses import dataclass

from _pytest.fixtures import fixture
from attr import dataclass as attr_dataclass
from pydantic import BaseModel


class Model1(BaseModel):
    # Simplest flat two-field model shared across the test suite.
    foo: str
    bar: str


@fixture
def model2_cls():
    # Pydantic model nesting a stdlib dataclass; built inside a fixture so
    # each test gets a fresh class object.
    class Model2(BaseModel):
        @dataclass
        class Foo:
            bar: int
            baz: str

        foo: Foo

    return Model2


@fixture
def model3_cls():
    # Same shape as model2_cls but nesting an attrs dataclass instead.
    class Model3(BaseModel):
        @attr_dataclass
        class Foo:
            bar: int
            baz: str

        foo: Foo

    return Model3


@dataclass
class Model4:
    # Stdlib dataclass wrapping a pydantic model.
    class Foo(BaseModel):
        bar: int
        baz: str

    foo: Foo


@attr_dataclass
class Model5:
    # Attrs dataclass wrapping a pydantic model (mirror of Model4).
    class Foo(BaseModel):
        bar: int
        baz: str

    foo: Foo


class Model6(BaseModel):
    # Doubly nested pydantic models: baz.bam and baf both hold Model1.
    class Baz(BaseModel):
        bam: Model1

    baz: Baz
    baf: Model1
from pathlib import Path

from pydantic import FloatError
from pytest import mark

from pydantic_settings import LoadingValidationError, TextLocation
from pydantic_settings.errors import ExtendedErrorWrapper

from .test_settings_base import Model1


@mark.parametrize(
    'args, res',
    [
        # Error sourced from an environment variable: the rendered report
        # names the env var and the position inside its decoded text value.
        (
            (
                [
                    ExtendedErrorWrapper(
                        FloatError(),
                        loc=('foo', 'bar'),
                        source_loc=(
                            'T_FOO_BAR',
                            TextLocation(1, 1, 1, 1, 10, 20),
                        ),
                    )
                ],
                Model1,
                None,
            ),
            (
                '1 validation error for Model1 '
                '(in-memory buffer and environment variables):\n'
                'foo -> bar from env "T_FOO_BAR" at 10:20\n'
                '\tvalue is not a valid float (type=type_error.float)'
            ),
        ),
        # Error sourced from a configuration file: the rendered report
        # includes the file path and the line:column of the bad value.
        (
            (
                [
                    ExtendedErrorWrapper(
                        FloatError(),
                        loc=('foo', 'bar'),
                        source_loc=TextLocation(1, 4, 1, 1, 10, 20),
                    )
                ],
                Model1,
                Path('/path/to/conf/file.json'),
            ),
            (
                '1 validation error for Model1 '
                '(configuration file at "/path/to/conf/file.json"):\n'
                'foo -> bar from file at 1:4\n'
                '\tvalue is not a valid float (type=type_error.float)'
            ),
        ),
    ],
)
def test_load_validation_err_rendering(args, res):
    # The rendered report must match the expected text byte for byte.
    assert LoadingValidationError(*args).render_error() == res
def test_pydantic_model_field_description_with_overriding():
    # override_existing=True: the attribute docstring replaces a description
    # that was already supplied through `Field(...)`.
    @with_attrs_docs(override_existing=True)
    class PydanticModelFieldDocsModel(BaseModel):
        bar: int = Field(0, description='TEST OLD DESCRIPTION')
        """bar description"""

    assert (
        PydanticModelFieldDocsModel.__fields__['bar'].field_info.description
        == 'bar description'
    )


def test_pydantic_model_field_description_without_overriding():
    # override_existing=False: an explicit `Field` description wins over the
    # attribute docstring.
    @with_attrs_docs(override_existing=False)
    class PydanticModelFieldDocsModel(BaseModel):
        bar: int = Field(0, description='TEST OLD DESCRIPTION')
        """bar description"""

    assert (
        PydanticModelFieldDocsModel.__fields__['bar'].field_info.description
        == 'TEST OLD DESCRIPTION'
    )
@mark.parametrize(
    'in_val, out_json',
    [
        ('105', 105),
        ('106.5', 106.5),
        ('false', False),
        ('true', True),
        ('null', None),
        ('[]', []),
        ('[12, 23]', [12, 23]),
        ('[{"key": 12345}]', [{"key": 12345}]),
        ('{}', {}),
        ('{"key": 1}', {"key": 1}),
        ('{"key": "bar"}', {"key": "bar"}),
    ],
)
def test_get_json_value(in_val, out_json):
    # Stripping AST location metadata must yield the plain JSON value.
    assert json.loads(in_val).get_json_value() == out_json
def per_location_errors(load_err):
    """
    Yield ``(source location, underlying exception)`` pairs for each
    location-aware error attached to a loading error.
    """
    return (
        (raw_err.source_loc, raw_err.exc)
        for raw_err in load_err.raw_errors
        if isinstance(raw_err, ExtendedErrorWrapper)
    )


class Settings(BaseSettingsModel):
    # Flat settings model read from env vars prefixed with "T".
    class Config:
        env_prefix = 'T'

    foo: int
    bar: float


class Settings2(BaseSettingsModel):
    # Composite settings model: nests `Settings` directly and inside a list.
    class Config:
        env_prefix = 'A'

    settings_list: List[Settings]
    settings: Settings
    foo: str = ''
def test_validation_errors(model_cls, content, environ, locations):
    # Loading must fail with a validation error whose per-error source
    # locations (env var name or in-text position) and exception types
    # match the parametrized expectations.
    with raises(LoadingError) as exc_info:
        load_settings(
            model_cls,
            content,
            type_hint='json',
            load_env=True,
            environ=environ,
        )

    assert exc_info.type is LoadingValidationError
    # Compare locations and error classes separately for clearer diffs.
    assert [loc for loc, _ in per_location_errors(exc_info.value)] == [
        loc for loc, _ in locations
    ]
    assert [type(err) for _, err in per_location_errors(exc_info.value)] == [
        err_cls for _, err_cls in locations
    ]


def empty_tmp_file_creator(extension):
    """Return a factory that creates an empty temp file with `extension`."""

    def create():
        p = Path(tempfile.mktemp('.' + extension))
        p.touch()
        return p

    return create
def test_file_not_found():
    # A nonexistent path must surface as a LoadingError wrapping the
    # original FileNotFoundError and carrying the offending path.
    path = Path(tempfile.mktemp())
    with raises(LoadingError) as err_info:
        load_settings(Settings2, path)

    assert isinstance(err_info.value.cause, FileNotFoundError)
    assert err_info.value.file_path == path
def test_flat_model():
    # Flat model: one lookup key per field; neither "complex" flag is set.
    assert _build_model_flat_map(Model1, 'test', str.casefold) == {
        'test_foo': (('foo',), False, False),
        'test_bar': (('bar',), False, False),
    }


@mark.parametrize('model_cls', [Model4, Model5])
def test_complex_nested_models(model_cls):
    # A nested model field is itself complex-only, while its leaves remain
    # addressable through underscore-joined keys.
    assert _build_model_flat_map(model_cls, 'test', str.casefold) == {
        'test_foo': (('foo',), True, True),
        'test_foo_bar': (('foo', 'bar'), False, False),
        'test_foo_baz': (('foo', 'baz'), False, False),
    }
'VAL2'}, 118 | {'foo': {'bar': 'VAL1', 'baz': 'VAL2'}}, 119 | { 120 | ('foo', 'bar'): ('test_foo_bar', None), 121 | ('foo', 'baz'): ('test_foo_baz', None), 122 | }, 123 | ), 124 | ( 125 | Model4, 126 | {'test_foo_bar': 'VAL1'}, 127 | {'foo': {'bar': 'VAL1'}}, 128 | {('foo', 'bar'): ('test_foo_bar', None)}, 129 | ), 130 | ( 131 | Model6, 132 | {'test_baz_bam_foo': 'VAL1', 'test_baz_bam_bar': 'VAL2'}, 133 | {'baz': {'bam': {'foo': 'VAL1', 'bar': 'VAL2'}}}, 134 | { 135 | ('baz', 'bam', 'foo'): ('test_baz_bam_foo', None), 136 | ('baz', 'bam', 'bar'): ('test_baz_bam_bar', None), 137 | }, 138 | ), 139 | ( 140 | Model6, 141 | {'test_baz_bam': '{"foo": "VAL1", "bar": "VAL2"}'}, 142 | {'baz': {'bam': {'foo': 'VAL1', 'bar': 'VAL2'}}}, 143 | { 144 | ('baz', 'bam', 'foo'): ( 145 | 'test_baz_bam', 146 | TextLocation( 147 | line=1, 148 | col=9, 149 | end_line=1, 150 | end_col=15, 151 | pos=9, 152 | end_pos=14, 153 | ), 154 | ), 155 | ('baz', 'bam', 'bar'): ( 156 | 'test_baz_bam', 157 | TextLocation( 158 | line=1, 159 | col=24, 160 | end_line=1, 161 | end_col=30, 162 | pos=24, 163 | end_pos=29, 164 | ), 165 | ), 166 | }, 167 | ), 168 | ( 169 | Model6, 170 | {'test_baz': '{"bam": {"foo": "VAL1", "bar": "VAL2"}}'}, 171 | {'baz': {'bam': {'foo': 'VAL1', 'bar': 'VAL2'}}}, 172 | { 173 | ('baz', 'bam', 'foo'): ( 174 | 'test_baz', 175 | TextLocation( 176 | line=1, 177 | col=17, 178 | end_line=1, 179 | end_col=23, 180 | pos=17, 181 | end_pos=22, 182 | ), 183 | ) 184 | }, 185 | ), 186 | ( 187 | Model6, 188 | { 189 | 'test_baz_bam_foo': 'VAL1', 190 | 'test_baz_bam_bar': 'VAL2', 191 | 'test_baf_foo': 'VAL3', 192 | }, 193 | { 194 | 'baz': {'bam': {'foo': 'VAL1', 'bar': 'VAL2'}}, 195 | 'baf': {'foo': 'VAL3'}, 196 | }, 197 | { 198 | ('baz', 'bam', 'foo'): ('test_baz_bam_foo', None), 199 | ('baz', 'bam', 'bar'): ('test_baz_bam_bar', None), 200 | ('baf', 'foo'): ('test_baf_foo', None), 201 | }, 202 | ), 203 | ], 204 | ) 205 | def test_restore_model(model_cls, input_val, result, locations): 206 | 
def test_restore_model(model_cls, input_val, result, locations):
    # Restore the flat mapping and check values, errors and locations.
    values, errs = ModelShapeRestorer(
        model_cls, 'TEST', False, decode_document
    ).restore(input_val)
    # BUG FIX: the original `assert values, errs == (result, [])` parsed as
    # `assert values` with `errs == (result, [])` as the assertion message,
    # so neither the restored values nor the error list was ever checked.
    assert errs == []
    assert values == result

    assert {loc: values.get_location(loc) for loc in locations} == locations
def test_settings_model_attrs_docs_created_automatically():
    # With `build_attr_docs = True`, attribute docstrings should be lifted
    # into each field's `description` without an explicit decorator.
    class SettingsModel(BaseSettingsModel):
        class Config:
            build_attr_docs = True

        bar: int
        """bar description"""

    assert (
        SettingsModel.__fields__['bar'].field_info.description
        == 'bar description'
    )