├── climatecontrol
│   ├── py.typed
│   ├── ext
│   │   ├── __init__.py
│   │   ├── pydantic.py
│   │   └── dataclasses.py
│   ├── __init__.py
│   ├── constants.py
│   ├── exceptions.py
│   ├── logtools.py
│   ├── cli_utils.py
│   ├── utils.py
│   ├── file_loaders.py
│   ├── env_parser.py
│   ├── fragment.py
│   ├── processors.py
│   └── core.py
├── pytest.ini
├── tests
│   ├── test_file_loaders.py
│   ├── ext
│   │   ├── test_dataclasses.py
│   │   └── test_pydantic.py
│   ├── test_env_parser.py
│   ├── conftest.py
│   ├── test_fragment.py
│   └── test_settings.py
├── MANIFEST.in
├── mypy.ini
├── environment.yml
├── .coveragerc
├── .isort.cfg
├── setup.py
├── .flake8
├── .gitignore
├── LICENSE
├── CONTRIBUTING.md
├── tasks.py
├── .github
│   └── workflows
│       └── ci.yaml
├── pyproject.toml
├── CHANGELOG.md
└── README.rst

/climatecontrol/py.typed:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | addopts = --doctest-modules
--------------------------------------------------------------------------------
/tests/test_file_loaders.py:
--------------------------------------------------------------------------------
1 | """Test file loaders."""
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE README.rst .flake8 setup.py setup.cfg
--------------------------------------------------------------------------------
/climatecontrol/ext/__init__.py:
--------------------------------------------------------------------------------
1 | """Climatecontrol extensions package."""
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | ignore_missing_imports=True
3 | disallow_untyped_defs=False
4 | check_untyped_defs=True
--------------------------------------------------------------------------------
/environment.yml:
--------------------------------------------------------------------------------
1 | name: climatecontrol
2 | dependencies:
3 |   - python=3.10
4 |   - pip=19
5 |   - ipython
6 |   - pip:
7 |     - pdbpp
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | branch = true
3 | source = .
4 | omit = setup.py,tasks.py 5 | [report] 6 | show_missing = true 7 | fail_under = 95 8 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | multi_line_output=3 3 | include_trailing_comma=True 4 | force_grid_wrap=0 5 | use_parentheses=True 6 | line_length=88 7 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """Setup climatecontrol.""" 4 | 5 | from setuptools import setup 6 | 7 | setup(name="climatecontrol") 8 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | # D202 No blank lines allowed after function docstring: not compatible with black 3 | # E721 Type comparison - intentional in __eq__ methods for exact type checks 4 | ignore = E203, E501, W503, W503, D105, D202, E721 5 | max-line-length = 88 6 | max-complexity = 10 7 | doctests = true 8 | exclude = .venv,.git,__pycache__,build,dist 9 | -------------------------------------------------------------------------------- /climatecontrol/__init__.py: -------------------------------------------------------------------------------- 1 | """CLIMATECONTROL controls your apps configuration environment. 2 | 3 | It is a Python library for loading app configurations from files and/or 4 | namespaced environment variables. 5 | 6 | :licence: MIT, see LICENSE file for more details. 7 | 8 | """ 9 | 10 | from .core import Climate 11 | from .exceptions import SettingsLoadError, SettingsValidationError 12 | 13 | climate = Climate() 14 | 15 | __all__ = [ 16 | "climate", 17 | "Climate", 18 | "SettingsValidationError", 19 | "SettingsLoadError", 20 | ] 21 | -------------------------------------------------------------------------------- /climatecontrol/constants.py: -------------------------------------------------------------------------------- 1 | """Constants used in other modules.""" 2 | 3 | from enum import Enum 4 | 5 | 6 | class _Removed(Enum): 7 | """Object representing an empty item.""" 8 | 9 | REMOVED = None 10 | 11 | def __repr__(self): 12 | return "" # pragma: nocover 13 | 14 | 15 | REMOVED = _Removed.REMOVED 16 | 17 | 18 | class _Empty(Enum): 19 | """Object representing an empty item.""" 20 | 21 | EMPTY = None 22 | 23 | def __repr__(self): 24 | return "" # pragma: nocover 25 | 26 | 27 | EMPTY = _Empty.EMPTY 28 | -------------------------------------------------------------------------------- /climatecontrol/exceptions.py: -------------------------------------------------------------------------------- 1 | """Exceptions used in climatecontrol.""" 2 | 3 | 4 | class SettingsValidationError(ValueError): 5 | """Failed to validate settings.""" 6 | 7 | 8 | class SettingsLoadError(ValueError): 9 | """Settings file is neither path nor content.""" 10 | 11 | 12 | class ContentLoadError(SettingsLoadError): 13 | """Contents could not be loaded.""" 14 | 15 | 16 | class FileLoadError(SettingsLoadError): 17 | """Contents could not be loaded.""" 18 | 19 | 20 | class NoCompatibleLoaderFoundError(SettingsLoadError): 21 | """Settings could not be loaded do to format or file being incompatible.""" 22 | -------------------------------------------------------------------------------- /climatecontrol/logtools.py: 
-------------------------------------------------------------------------------- 1 | """Logging utilities.""" 2 | 3 | import logging 4 | import time 5 | from logging import config as logging_config 6 | from typing import Any 7 | 8 | formatter: Any = logging.Formatter 9 | formatter.converter = time.gmtime 10 | logger: logging.Logger = logging.getLogger(__name__) 11 | 12 | 13 | DEFAULT_LOG_SETTINGS = { 14 | "version": 1, 15 | "disable_existing_loggers": False, 16 | "formatters": { 17 | "default": {"format": "%(asctime)s UTC [%(levelname)s] %(name)s: %(message)s"} 18 | }, 19 | "handlers": { 20 | "console": { 21 | "class": "logging.StreamHandler", 22 | "formatter": "default", 23 | "level": "DEBUG", 24 | } 25 | }, 26 | "root": {"level": "INFO", "handlers": ["console"]}, 27 | } 28 | 29 | 30 | __ALL__ = [DEFAULT_LOG_SETTINGS, logging, logger, logging_config] 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | bin/ 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *,cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | 57 | # Sphinx documentation 58 | docs/_build/ 59 | 60 | # PyBuilder 61 | target/ 62 | 63 | #Ipython Notebook 64 | .ipynb_checkpoints 65 | 66 | # Type Checking 67 | .mypy_cache 68 | 69 | # Misc 70 | *.swp 71 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016, 2017, 2018, 2019, 2020, 2021, 2022 Davis Kirkendall 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Contributions are always welcome! 2 | 3 | Writing new issues 4 | ------------------ 5 | 6 | If you are using climatecontrol and find a 7 | bug, please write an issue including such things as: 8 | 9 | * climatecontrol version (run `pip show climatecontrol` and look for the version) 10 | * Python version 11 | * Current operating system + version (Linux / Windows / Mac) 12 | 13 | Development 14 | ----------- 15 | 16 | If you find a bug and want to fix it yourself or want to implement a new 17 | feature, there are a few tools that can help you do this: 18 | 19 | First install the dev dependencies 20 | 21 | ```sh 22 | pip install -e ".[dev]" 23 | ``` 24 | 25 | Now you can use the `invoke` command to run formatting, code checks and tests as needed: 26 | 27 | ```sh 28 | invoke --list # show available commands 29 | 30 | Available tasks: 31 | 32 | all Run format, check and test all in one command. 33 | check Check the code is ok by running flake8, black, isort and mypy. 34 | format Format the code to make it compatible with the `check` command. 35 | test Run all tests using pytest. 36 | ``` 37 | 38 | During development, run these as needed. The same checks run in CI so if they 39 | pass, you should be ok. For example you can run: 40 | 41 | ```sh 42 | invoke all # format, check and run tests 43 | ``` 44 | -------------------------------------------------------------------------------- /tasks.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | """Development tasks for convenience.""" 4 | 5 | from invoke import Collection, Task, task 6 | 7 | 8 | @task 9 | def format(c): 10 | """Format the code to make it compatible with the `check` command.""" 11 | print("> sorting imports]") 12 | c.run("isort .") 13 | 14 | print("> [painting all the code black]") 15 | c.run("black .") 16 | 17 | 18 | @task 19 | def test(c, aliases=["pytest"]): 20 | """Run all tests using pytest.""" 21 | print("") 22 | print("[running pytest]") 23 | c.run("coverage run -m pytest") 24 | c.run("coverage report") 25 | 26 | 27 | @task 28 | def check(c): 29 | """Check the code is ok by running flake8, black, isort and mypy.""" 30 | print("> check that code is formatted well") 31 | c.run("black --check .") 32 | c.run("isort --check-only .") 33 | print("> lint with flake8") 34 | c.run("flake8") 35 | print("> typecheck") 36 | c.run("mypy .") 37 | 38 | 39 | @task(pre=[format, check, test]) 40 | def all(c): 41 | """Run format, check and test all in one command.""" 42 | 43 | 44 | # Configure default collection to change default pty setting 45 | # Pytest will run much nicer if pty is set to true. 
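# (With a pty, pytest detects an interactive terminal and keeps its colored,
# progress-style output instead of falling back to plain captured output when
# run through invoke.)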
46 | ns = Collection(*(item for item in locals().values() if isinstance(item, Task))) 47 | ns.configure({"run": {"pty": True}}) 48 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: ci 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | 9 | jobs: 10 | test: 11 | name: test (py${{ matrix.python-version }}, pydantic${{ matrix.pydantic-version }}) 12 | runs-on: ubuntu-latest 13 | strategy: 14 | matrix: 15 | python-version: ["3.10", "3.11", "3.12"] 16 | pydantic-version: ["1", "2"] 17 | 18 | steps: 19 | - uses: actions/checkout@v2 20 | - name: Set up Python ${{ matrix.python-version }} 21 | uses: actions/setup-python@v2 22 | with: 23 | python-version: ${{ matrix.python-version }} 24 | - name: Install uv 25 | uses: astral-sh/setup-uv@v5 26 | with: 27 | version: "0.9.8" 28 | enable-cache: true 29 | - name: Install dependencies 30 | run: | 31 | uv sync --all-extras 32 | - name: Install Pydantic ${{ matrix.pydantic-version }} 33 | run: | 34 | if [ "${{ matrix.pydantic-version }}" = "1" ]; then 35 | uv pip install "pydantic>=1.9.0,<2.0" 36 | else 37 | uv pip install "pydantic>=2.0,<3.0" 38 | fi 39 | - name: Run checks and lint code 40 | run: | 41 | uv run invoke check 42 | - name: Run tests 43 | run: | 44 | uv run invoke test 45 | uv run coverage xml 46 | - name: Upload coverage to Codecov 47 | uses: codecov/codecov-action@v1 48 | -------------------------------------------------------------------------------- /tests/ext/test_dataclasses.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from dataclasses import dataclass, field 3 | 4 | import dacite 5 | import pytest 6 | 7 | from climatecontrol.ext.dataclasses import Climate 8 | 9 | 10 | def test_climate_simple(mock_empty_os_environ): 11 | """Test basic dataclass climate object.""" 12 | 13 | @dataclass 14 | class A: 15 | a: int = 1 16 | b: str = "yeah" 17 | 18 | climate = Climate(dataclass_cls=A) 19 | assert climate.settings == A() 20 | 21 | 22 | def test_climate(mock_empty_os_environ): 23 | """Test climate with dataclasses.""" 24 | 25 | @dataclass 26 | class C: 27 | d: str = "weee" 28 | e: int = 0 29 | 30 | @dataclass 31 | class A: 32 | c: C = field(default_factory=C) 33 | a: int = 1 34 | b: str = "yeah" 35 | 36 | climate = Climate(dataclass_cls=A) 37 | assert climate.settings.c.d == "weee" 38 | climate.update({"b": "changed"}) 39 | assert len(climate._fragments) == 1 40 | assert climate.settings.b == "changed" 41 | assert climate.settings.c == C() 42 | climate.update({"c": {"d": "test"}}) 43 | assert len(climate._fragments) == 2 44 | assert climate.settings.c == C(d="test") 45 | 46 | climate.update({"c": C(d="test2")}) 47 | assert len(climate._fragments) == 3 48 | assert climate.settings.c == C(d="test2") 49 | 50 | with pytest.raises(dacite.WrongTypeError): 51 | # assigning an int to a "str" field should fail 52 | climate.update({"c": {"d": 4}}) 53 | # no update should have been performed. 
54 | assert len(climate._fragments) == 3 55 | -------------------------------------------------------------------------------- /climatecontrol/cli_utils.py: -------------------------------------------------------------------------------- 1 | """CLI utils for easy command line extras.""" 2 | 3 | import click 4 | 5 | from climatecontrol import core 6 | 7 | 8 | def click_settings_file_option( 9 | settings_obj: core.Climate, click_obj=click, option_name="settings", **kw 10 | ): 11 | """Build a `click` option decorator. 12 | 13 | Args: 14 | settings_obj: settings object to load configuration into. 15 | click_obj: if a command 16 | 17 | Example: 18 | Given a command line script `cli.py`: 19 | 20 | .. code-block:: python 21 | 22 | import click 23 | from climatecontrol import core, cli_utils 24 | 25 | settings_map = settings_parser.Climate(env_prefix='TEST_STUFF') 26 | 27 | @click.command() 28 | @cli_utils.click_settings_file_option(settings_map) 29 | def tmp_cli(): 30 | pass 31 | 32 | And running the script: 33 | 34 | .. code-block:: bash 35 | 36 | python cli.py --settings 'my_settings_file.yaml' 37 | 38 | will load settings from `my_settings_file.yaml` into the `settings_map` 39 | object which can then be used in the script. 40 | 41 | """ 42 | 43 | def validate(ctx, param, value): 44 | if value: 45 | settings_obj.settings_files = value 46 | settings_obj.update() 47 | 48 | option_kwargs = dict( 49 | help="Settings file path for loading settings from file.", 50 | callback=validate, 51 | type=click_obj.Path(exists=True, dir_okay=False, resolve_path=True), 52 | expose_value=False, 53 | is_eager=True, 54 | multiple=True, 55 | ) 56 | option_kwargs.update(kw) 57 | option = click_obj.option( 58 | "--{}".format(option_name), "-{}".format(option_name[0]), **option_kwargs 59 | ) 60 | return option 61 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "climatecontrol" 3 | version = "0.13.0" 4 | description = "Python library for loading app configurations from files and/or namespaced environment variables." 
5 | authors = [ 6 | {name = "Davis Kirkendall", email = "davis.e.kirkendall@gmail.com"} 7 | ] 8 | license = "MIT" 9 | readme = "README.rst" 10 | requires-python = ">=3.10,<4.0" 11 | keywords = ["climatecontrol", "configuration", "environment"] 12 | classifiers = [ 13 | "Development Status :: 4 - Beta", 14 | "Intended Audience :: Developers", 15 | "Natural Language :: English", 16 | "Operating System :: OS Independent", 17 | "Programming Language :: Python :: 3", 18 | "Programming Language :: Python :: 3.10", 19 | "Programming Language :: Python :: 3.11", 20 | "Programming Language :: Python :: 3.12", 21 | "Topic :: Utilities", 22 | "Topic :: Software Development :: Libraries", 23 | "Topic :: Software Development :: Libraries :: Python Modules", 24 | ] 25 | dependencies = [ 26 | "wrapt>=1.14,<2.0", 27 | ] 28 | 29 | [project.optional-dependencies] 30 | dataclasses = [ 31 | "dacite>=1.6,<2.0", 32 | "pydantic>=1.7.4,<3.0", 33 | ] 34 | dev = [ 35 | "pytest>=6.2.2", 36 | "pytest-mock>=3.5.1", 37 | "coverage>=5.4", 38 | "tomli>=2.0.1", 39 | "PyYAML>=6.0.1", 40 | "click>=8.0", 41 | "invoke>=2.0", 42 | "black>=22.1.0", 43 | "mypy>=1.0", 44 | "isort>=5.10.1", 45 | "flake8>=6.0", 46 | "dacite>=1.6.0", 47 | "pydantic>=1.9.0,<3.0", 48 | "types-PyYAML>=6.0.4", 49 | "tomli-w>=1.0.0", 50 | ] 51 | 52 | [project.urls] 53 | Homepage = "https://github.com/daviskirk/climatecontrol" 54 | Repository = "https://github.com/daviskirk/climatecontrol" 55 | Documentation = "https://github.com/daviskirk/climatecontrol" 56 | 57 | [build-system] 58 | requires = ["uv_build>=0.9.8,<0.10.0"] 59 | build-backend = "uv_build" 60 | 61 | [tool.uv.build-backend] 62 | module-root = "" 63 | -------------------------------------------------------------------------------- /climatecontrol/utils.py: -------------------------------------------------------------------------------- 1 | """Utility functions.""" 2 | 3 | import collections 4 | import collections.abc 5 | import json 6 | from copy import deepcopy 7 | from itertools import zip_longest 8 | from typing import Any, Mapping, Sequence, Union 9 | 10 | from climatecontrol.constants import EMPTY 11 | 12 | 13 | def get_nested(obj: Union[Mapping, Sequence], path: Sequence) -> Any: 14 | """Get element of a sequence or map based on multiple nested keys. 15 | 16 | Args: 17 | obj: Object to index from 18 | path: Sequence of nested keys. 19 | 20 | Example: 21 | >>> get_nested({'a': {'b': [1, 2]}}, ['a', 'b', 0]) 22 | 1 23 | 24 | """ 25 | result = obj 26 | traversed = [] 27 | for subpath in path: 28 | traversed.append(subpath) 29 | try: 30 | result = result[subpath] 31 | except (KeyError, IndexError, TypeError) as e: 32 | raise type(e)(str(e.args[0]) + " at nested path: {}".format(traversed)) 33 | return result 34 | 35 | 36 | def merge_nested(d: Any, u: Any) -> Any: 37 | """Merge nested mapping or sequence ``d`` with nested mapping or sequence ``u``. 38 | 39 | Dictionaries are merge recursively while sequences are merged by index (and 40 | expanded automatically if longer). Note that the special value 41 | :data:``EMPTY`` can be used in `u` to NOT overwrite a sequence item. 
42 | 
43 |     Example:
44 |         >>> merge_nested({'a': {'b': [3, {'c': 4}, 5]}}, {'a': {'b': [EMPTY, {'d': 6}]}})
45 |         {'a': {'b': [3, {'c': 4, 'd': 6}, 5]}}
46 | 
47 |     """
48 |     if isinstance(d, Mapping):
49 |         new_dict: dict = dict(**d)
50 |         if not isinstance(u, collections.abc.Mapping):
51 |             return deepcopy(u)
52 |         for k, u_v in u.items():
53 |             new_dict[k] = merge_nested(d.get(k), u_v)
54 |         return new_dict
55 |     elif isinstance(d, collections.abc.Sequence) and not isinstance(d, str):
56 |         if not isinstance(u, collections.abc.Sequence) or isinstance(u, str):
57 |             return deepcopy(u)
58 |         new_list = [
59 |             merge_nested(d_item, u_item) if u_item is not EMPTY else d_item
60 |             for d_item, u_item in zip_longest(d, u, fillvalue=EMPTY)
61 |         ]
62 |         return new_list
63 |     return deepcopy(u)
64 | 
65 | 
66 | def parse_as_json_if_possible(v: str) -> Any:
67 |     """Parse a string value as json if possible, but fall back to the string if not."""
68 |     if isinstance(v, str):
69 |         try:
70 |             return json.loads(v)
71 |         except json.JSONDecodeError:
72 |             pass
73 |     return v
74 | 
75 | 
76 | def int_if_digit(s: str):
77 |     """Interpret `s` as an integer if it represents a digit string."""
78 |     try:
79 |         if s.isdigit():
80 |             return int(s)
81 |     except AttributeError:
82 |         pass
83 |     return s
84 | 
--------------------------------------------------------------------------------
/climatecontrol/ext/pydantic.py:
--------------------------------------------------------------------------------
1 | """Climatecontrol extension for using pydantic schemas as source.
2 | 
3 | Supports both Pydantic v1 (>=1.7.4) and v2 (>=2.0).
4 | """
5 | 
6 | from typing import Generic, Mapping, Type, TypeVar
7 | 
8 | from pydantic import BaseModel
9 | 
10 | from climatecontrol.core import Climate as BaseClimate
11 | from climatecontrol.core import SettingsItem as BaseSettingsItem
12 | from climatecontrol.fragment import FragmentPath
13 | 
14 | T = TypeVar("T", bound=BaseModel)
15 | 
16 | 
17 | class SettingsItem(BaseSettingsItem):
18 |     @classmethod
19 |     def _self_is_mutable(cls, value) -> bool:
20 |         return super()._self_is_mutable(value) or isinstance(value, BaseModel)
21 | 
22 | 
23 | class Climate(BaseClimate, Generic[T]):
24 |     """Climate settings manager for pydantic models."""
25 | 
26 |     def __init__(self, *args, model: Type[T], **kwargs):
27 |         """Initialize pydantic climate object.
28 | 
29 |         Uses a pydantic model as a schema to initialize settings and check types.
30 | 
31 |         Args:
32 |             *args, **kwargs: See :class:`climatecontrol.Climate`
33 |             model: Additional argument specific to the pydantic extension. The model class is used as the schema for initializing and validating settings.
34 | 
35 |         Examples:
36 | 
37 |             >>> from climatecontrol.ext.pydantic import Climate
38 |             >>> from pydantic import BaseModel, Field
39 |             >>>
40 |             >>> class SettingsSubSchema(BaseModel):
41 |             ...     d: int = 4
42 |             ...
43 |             >>> class SettingsSchema(BaseModel):
44 |             ...     a: str = 'test'
45 |             ...     b: bool = False
46 |             ...     c: SettingsSubSchema = Field(default_factory=SettingsSubSchema)
47 |             ...
48 |             >>> climate = Climate(model=SettingsSchema)
49 |             >>> # defaults are initialized automatically:
50 |             >>> climate.settings.a
51 |             'test'
52 |             >>> climate.settings.c.d
53 |             4
54 |             >>> # Types are checked if given
55 |             >>> climate.update({'c': {'d': 'boom!'}})  # doctest: +ELLIPSIS, +IGNORE_EXCEPTION_DETAIL
56 |             Traceback (most recent call last):
57 |             ...
58 |             pydantic...ValidationError: 1 validation error...
59 |             ...
60 | 
61 |         Note:
62 |             This extension supports both Pydantic v1 (>=1.7.4) and v2 (>=2.0).
63 |             The error messages and formats may vary between versions.
64 | 
65 |         See Also:
66 |             :module:`pydantic`: Used to initialize and check settings.
67 | 
68 |         """
69 |         self.model = model
70 |         super().__init__(*args, **kwargs)
71 | 
72 |     @property
73 |     def settings(self) -> T:
74 |         self.ensure_initialized()
75 |         return SettingsItem(self._data, self, FragmentPath())
76 | 
77 |     def parse(self, data: Mapping) -> T:
78 |         """Parse data into the provided pydantic model."""
79 |         data = super().parse(data)
80 |         obj: T = self.model(**data)
81 |         return obj
82 | 
--------------------------------------------------------------------------------
/climatecontrol/ext/dataclasses.py:
--------------------------------------------------------------------------------
1 | """Extension for using climatecontrol with dataclasses."""
2 | 
3 | from dataclasses import is_dataclass
4 | from typing import Generic, Mapping, Type, TypeVar
5 | 
6 | import dacite
7 | 
8 | from climatecontrol.core import Climate as BaseClimate
9 | from climatecontrol.core import SettingsItem as BaseSettingsItem
10 | from climatecontrol.fragment import FragmentPath
11 | 
12 | T = TypeVar("T")
13 | 
14 | 
15 | class SettingsItem(BaseSettingsItem):
16 |     @classmethod
17 |     def _self_is_mutable(cls, value) -> bool:
18 |         return super()._self_is_mutable(value) or is_dataclass(value)
19 | 
20 | 
21 | class Climate(BaseClimate, Generic[T]):
22 |     """Climate settings manager for dataclasses."""
23 | 
24 |     _processors = tuple(list(BaseClimate._processors) + [])
25 | 
26 |     def __init__(self, *args, dataclass_cls: Type[T], **kwargs):
27 |         """Initialize dataclass climate object.
28 | 
29 |         Uses a dataclass as a schema to initialize settings and check types.
30 | 
31 |         Args:
32 |             *args, **kwargs: See :class:`climatecontrol.Climate`
33 |             dataclass_cls: Additional argument specific to the dataclass extension. Given a class decorated with :func:`dataclasses.dataclass`, the settings object will be initialized and checked according to the class's field definitions and types.
34 | 
35 |         Examples:
36 | 
37 |             >>> from climatecontrol.ext.dataclasses import Climate
38 |             >>> from dataclasses import dataclass, field
39 |             >>>
40 |             >>> @dataclass
41 |             ... class SettingsSubSchema:
42 |             ...     d: int = 4
43 |             ...
44 |             >>> @dataclass
45 |             ... class SettingsSchema:
46 |             ...     a: str = 'test'
47 |             ...     b: bool = False
48 |             ...     c: SettingsSubSchema = field(default_factory=SettingsSubSchema)
49 |             ...
50 |             >>> climate = Climate(dataclass_cls=SettingsSchema)
51 |             >>> # defaults are initialized automatically:
52 |             >>> climate.settings.a
53 |             'test'
54 |             >>> climate.settings.c.d
55 |             4
56 |             >>> # Types are checked if given
57 |             >>> climate.update({'c': {'d': 'boom!'}})
58 |             Traceback (most recent call last):
59 |             ...
60 |             dacite.exceptions.WrongTypeError: wrong value type for field "c.d" - should be "int" instead of value "boom!" of type "str"
61 | 
62 |         See Also:
63 |             :module:`dacite`: Used to initialize and check dataclasses.
64 | 65 | """ 66 | self.dataclass_cls = dataclass_cls 67 | super().__init__(*args, **kwargs) 68 | 69 | @property 70 | def settings(self) -> T: 71 | self.ensure_initialized() 72 | return SettingsItem(self._data, self, FragmentPath()) 73 | 74 | def parse(self, data: Mapping) -> T: 75 | """Parse data into the provided dataclass.""" 76 | data = super().parse(data) 77 | obj: T = dacite.from_dict(self.dataclass_cls, {k: v for k, v in data.items()}) 78 | return obj 79 | -------------------------------------------------------------------------------- /tests/ext/test_pydantic.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import pytest 3 | 4 | try: 5 | from pydantic import BaseModel, ValidationError 6 | except ImportError: 7 | pytest.skip("pydantic not installed", allow_module_level=True) 8 | 9 | from climatecontrol.ext.pydantic import Climate 10 | 11 | 12 | def test_climate_simple(mock_empty_os_environ): 13 | """Test basic pydantic climate object.""" 14 | 15 | class A(BaseModel): 16 | a: int = 1 17 | b: str = "yeah" 18 | 19 | climate = Climate(model=A) 20 | assert climate.settings.a == 1 21 | assert climate.settings.b == "yeah" 22 | 23 | 24 | def test_climate(mock_empty_os_environ): 25 | """Test climate with pydantic models.""" 26 | 27 | class C(BaseModel): 28 | d: str = "weee" 29 | e: int = 0 30 | 31 | class A(BaseModel): 32 | c: C = C() 33 | a: int = 1 34 | b: str = "yeah" 35 | 36 | climate = Climate(model=A) 37 | assert climate.settings.c.d == "weee" 38 | climate.update({"b": "changed"}) 39 | assert len(climate._fragments) == 1 40 | assert climate.settings.b == "changed" 41 | assert climate.settings.c.d == "weee" 42 | assert climate.settings.c.e == 0 43 | 44 | climate.update({"c": {"d": "test"}}) 45 | assert len(climate._fragments) == 2 46 | assert climate.settings.c.d == "test" 47 | 48 | climate.update({"c": C(d="test2")}) 49 | assert len(climate._fragments) == 3 50 | assert climate.settings.c.d == "test2" 51 | 52 | with pytest.raises(ValidationError): 53 | # assigning a list to a "str" field should fail 54 | climate.update({"c": {"d": [1, 2, 3]}}) 55 | # no update should have been performed. 
56 | assert len(climate._fragments) == 3 57 | 58 | 59 | def test_climate_nested_models(mock_empty_os_environ): 60 | """Test climate with nested pydantic models.""" 61 | 62 | class D(BaseModel): 63 | value: str = "nested" 64 | 65 | class C(BaseModel): 66 | d: D = D() 67 | name: str = "middle" 68 | 69 | class A(BaseModel): 70 | c: C = C() 71 | a: int = 1 72 | 73 | climate = Climate(model=A) 74 | assert climate.settings.c.d.value == "nested" 75 | assert climate.settings.c.name == "middle" 76 | assert climate.settings.a == 1 77 | 78 | climate.update({"c": {"d": {"value": "updated"}}}) 79 | assert climate.settings.c.d.value == "updated" 80 | 81 | 82 | def test_climate_type_validation(mock_empty_os_environ): 83 | """Test that pydantic type validation works.""" 84 | 85 | class A(BaseModel): 86 | number: int 87 | flag: bool = False 88 | 89 | climate = Climate(model=A) 90 | 91 | climate.update({"number": 42}) 92 | assert climate.settings.number == 42 93 | 94 | climate.update({"flag": True}) 95 | assert climate.settings.flag is True 96 | 97 | # String to int should work for valid integers 98 | climate.update({"number": "123"}) 99 | assert climate.settings.number == 123 100 | 101 | # Invalid type conversion should fail 102 | with pytest.raises(ValidationError): 103 | climate.update({"number": "not a number"}) 104 | 105 | 106 | def test_climate_with_defaults(mock_empty_os_environ): 107 | """Test climate with default values.""" 108 | 109 | class Settings(BaseModel): 110 | host: str = "localhost" 111 | port: int = 8080 112 | debug: bool = False 113 | 114 | climate = Climate(model=Settings) 115 | assert climate.settings.host == "localhost" 116 | assert climate.settings.port == 8080 117 | assert climate.settings.debug is False 118 | 119 | climate.update({"port": 3000, "debug": True}) 120 | assert climate.settings.host == "localhost" # unchanged 121 | assert climate.settings.port == 3000 122 | assert climate.settings.debug is True 123 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## [0.11.0] - 2022-02-28 4 | 5 | ### Added 6 | 7 | - Python 3.10 support (added to test suite) 8 | 9 | ### Removed 10 | 11 | - Python 3.6 support 12 | 13 | ### Changed 14 | 15 | - Use `tomli` instead of `toml` package in in preparation for 16 | https://www.python.org/dev/peps/pep-0680/ (tomli is the basis for pythons 17 | future builtin toml support) 18 | - Updated dependencies 19 | 20 | 21 | ## [0.10.0] - 2021-04-17 22 | 23 | ### Changed 24 | 25 | - Allow `settings_files` to contain `pathlib.Path` objects as well as strings. 26 | 27 | ### Removed 28 | 29 | - Assigning strings to `settings_files` will no longer coerce strings into lists. 30 | 31 | ## [0.9.0] - 2020-10-13 32 | 33 | ### Added 34 | 35 | - A "_from_file" key at the root of a file loads the file into the root configuration 36 | 37 | ### Removed 38 | 39 | - Remove deprecated `climatecontrol.Settings` (use `climatecontrol.Climate` instead) 40 | - Remove deprecated options `implicit_depth` and `update_on_init`. 41 | 42 | ## [0.8.0] - 2019-02-27 43 | 44 | ### Added 45 | 46 | - Allow "_from_env" postfix in variables and load settings from the specified 47 | environment variable (similar to "from_file"). 48 | - Add "update_log" property which specifies a log of all variables loaded or 49 | removed/replaced. 
50 | - Allow overwriting / merging lists 51 | - Allow environment variables to target list indexes (`CLIMATECONTROL_SECTION__0__VALUE=testvalue`) 52 | - Settings proxy object to allow attribute queries (i.e. 53 | `climatecontrol.settings.a.b[0].c`). 54 | - Added initial support for dataclasses as extension `climatecontrol.ext.dataclasses` 55 | - Add "from_*_content" processor allowing direct content to be used in variables 56 | (can be helpful for environment variables i.e. 57 | `CLIMATECONTROL_SECTION1_FROM_JSON_CONTENT='{"v1": 1, "v2": 2}'`). 58 | - Add inferred settings files and search for them along the directory structure 59 | up to project root (`climatecontrol.core.Climate.inferred_settings_files`). 60 | 61 | ### Changed 62 | 63 | - Use fragments as base data structure for building settings map 64 | - Add python 3.7 and 3.8 to CI and update development environment config to 3.8 65 | - Format using [black](https://github.com/psf/black) 66 | - Dissallow setting or deleting objects from settings object (to avoid confusion 67 | when setting the object does not change the settings object after an update) 68 | 69 | ### Deprecated 70 | 71 | - Deprecated `implicit_depth` argument in settings object 72 | - Deprecate use of `Settings` class (use `Climate` instead, `Settings` is now an 73 | alias of `Climate`) 74 | 75 | ### Removed 76 | 77 | - Remove python 3.5 support 78 | 79 | 80 | ## [0.7.3] - 2019-01-17 81 | 82 | ### Fixed 83 | 84 | - Fix update with bad config leaving the object in a broken state ( #15 ) 85 | 86 | 87 | ## [0.7.2] - 2018-10-04 88 | 89 | ### Fixed 90 | 91 | - Fix bug where value of "from_file" variable was logged instead of the file. 92 | 93 | ### Added 94 | 95 | - Support for click 7.0 96 | 97 | 98 | ## [0.7.1] - 2018-08-14 99 | 100 | ### Fixed 101 | 102 | - Logging settings update recursively and do not overwrite unrelated defaults 103 | 104 | ### Removed 105 | 106 | - Actually remove deprecated `max_depth` (see 0.7) 107 | 108 | 109 | ## [0.7] - 2018-07-28 110 | 111 | ### Removed 112 | 113 | - Remove deprecated `max_depth` option for `EnvParser`/`Settings`. Use `implicit_depth` instead. 114 | - Removed unneeded (and undocumented) features: 115 | - Filtering (error-prone and undocumented) 116 | - Dynamic preparsers (should now be handled through subclassing) 117 | - Ordering of source loading (should now be handled through subclassing) 118 | - Remove hard dependency on toml (if no serialization libraries like toml or 119 | yaml are installed, will fall back to json). 120 | - Remove unneeded `logtools` module. 121 | 122 | ### Fixed 123 | 124 | - #13 : Update fragments (separate files) are now preprocessed separately. 125 | "from_file" variables can no longer override subsequent file settings. 126 | 127 | ### Added 128 | 129 | - Add serialization to json and yaml when generating configurations. 130 | - Allow "from_file" modules to be parsed recursively (if a from_file setting is 131 | set to a json/yaml/toml file, it's contents will be parsed as a settings file 132 | instead of a simple string) 133 | - Add debug logging which settings attributes were set. 
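
The list-index and inline-content environment variables described in the 0.8.0 notes above can be sketched as follows (illustrative only: the `section`/`section1` keys are invented, the default `CLIMATECONTROL` prefix is assumed, and the explicit `update()` call is an assumption about when fragments are reloaded):

```python
import os

from climatecontrol import Climate

# Double underscores separate nested keys; a numeric segment targets a list index.
os.environ["CLIMATECONTROL_SECTION__0__VALUE"] = "testvalue"
# A "*_FROM_JSON_CONTENT" variable provides inline JSON content for a section.
os.environ["CLIMATECONTROL_SECTION1_FROM_JSON_CONTENT"] = '{"v1": 1, "v2": 2}'

climate = Climate()
climate.update()  # reload fragments from the environment

print(climate.settings.section[0].value)  # expected: testvalue
print(climate.settings.section1.v1)       # expected: 1
```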
134 | 135 | ## [0.6] - 2018-04-23 136 | 137 | ### Added 138 | 139 | - [#9](https://github.com/daviskirk/climatecontrol/pull/9): Add temporary_changes method to Settings object 140 | - [#10](https://github.com/daviskirk/climatecontrol/pull/10): Add better logging on setup 141 | 142 | 143 | ## [0.5] - 2018-03-13 144 | 145 | ### Changed 146 | - splitting behaviour has changed: By default, a double ``split_char`` ("\_\_") 147 | indicates a new nested settings section. This allows environment variables to 148 | look more natural when they are describing nested settings. The option 149 | ``implicit_depth`` may be used to override this behaviour and have a single 150 | "\_" indicate a new nested section. 151 | - ``max_depth`` parameter has been deprecated in favor of ``implicit_depth`` 152 | -------------------------------------------------------------------------------- /tests/test_env_parser.py: -------------------------------------------------------------------------------- 1 | """Test settings.""" 2 | 3 | import os 4 | import sys 5 | 6 | import pytest 7 | 8 | from climatecontrol.env_parser import EnvParser # noqa: I100 9 | from climatecontrol.fragment import Fragment 10 | 11 | 12 | @pytest.mark.parametrize( 13 | "attr, value, expected", 14 | [ 15 | ("prefix", "that", "THAT_"), 16 | ("settings_file_suffix", "suffix2", "suffix2"), 17 | ("settings_file_env_var", "wrongval", None), 18 | ], 19 | ) 20 | def test_envparser_assign(mock_empty_os_environ, attr, value, expected): 21 | """Check that we can assign attributes of env parser.""" 22 | s = EnvParser(prefix="this", settings_file_suffix="suffix") 23 | assert s.prefix == "THIS_" 24 | assert s.settings_file_suffix == "suffix" 25 | assert s.settings_file_env_var == "THIS_SUFFIX" 26 | 27 | if attr == "settings_file_env_var": 28 | with pytest.raises(AttributeError): 29 | setattr(s, attr, value) 30 | else: 31 | setattr(s, attr, value) 32 | assert getattr(s, attr) == expected 33 | if attr == "settings_file_suffix": 34 | assert s.settings_file_env_var == "THIS_" + expected.upper() 35 | 36 | 37 | @pytest.mark.parametrize( 38 | "prefix, split_char, expected_kws", 39 | [ 40 | ("TEST_STUFF", "-", []), 41 | ( 42 | "TEST_STUFF", 43 | "_", 44 | [ 45 | dict( 46 | value=6, 47 | source="ENV:TEST_STUFF_TESTGROUP__TEST_VAR", 48 | path=["testgroup", "test_var"], 49 | ), 50 | dict( 51 | value=7, 52 | source="ENV:TEST_STUFF_TESTGROUP__TESTVAR", 53 | path=["testgroup", "testvar"], 54 | ), 55 | dict( 56 | value=9, 57 | source="ENV:TEST_STUFF_TESTGROUP_TEST_VAR", 58 | path=["testgroup_test_var"], 59 | ), 60 | ], 61 | ), 62 | ("TEST_STUFFING", "_", []), 63 | ], 64 | ) 65 | def test_envparser_args_iter_load(mock_os_environ, prefix, split_char, expected_kws): 66 | """Check that we can parse settings from variables.""" 67 | env_parser = EnvParser(prefix=prefix, split_char=split_char) 68 | expected = [Fragment(**kw) for kw in expected_kws] 69 | results = list(env_parser.iter_load()) 70 | assert results == expected 71 | 72 | 73 | @pytest.mark.parametrize( 74 | "environ, expected_kw", 75 | [ 76 | pytest.param( 77 | { 78 | "TEST_STUFF_TESTGROUP__TEST_INT": "6", 79 | "TEST_STUFF_TESTGROUP__TEST_ARRAY": "[4, 5, 6]", 80 | "TEST_STUFF_TESTGROUP__TEST_RAW_STR": "al//asdjk", 81 | "TEST_STUFF_TESTGROUP__TEST_STR": '"[4, 5, 6]"', 82 | "TEST_STUFF_TESTGROUP__TEST_COMPLEX_RAW_STR": "amqp://guest:guest@127.0.0.1:5672//", 83 | }, 84 | [ 85 | { 86 | "value": 6, 87 | "source": "ENV:TEST_STUFF_TESTGROUP__TEST_INT", 88 | "path": ["testgroup", "test_int"], 89 | }, 90 | { 91 | "value": [4, 5, 6], 
92 | "source": "ENV:TEST_STUFF_TESTGROUP__TEST_ARRAY", 93 | "path": ["testgroup", "test_array"], 94 | }, 95 | { 96 | "value": "al//asdjk", 97 | "source": "ENV:TEST_STUFF_TESTGROUP__TEST_RAW_STR", 98 | "path": ["testgroup", "test_raw_str"], 99 | }, # noqa: E501 100 | { 101 | "value": "[4, 5, 6]", 102 | "source": "ENV:TEST_STUFF_TESTGROUP__TEST_STR", 103 | "path": ["testgroup", "test_str"], 104 | }, 105 | { 106 | "value": "amqp://guest:guest@127.0.0.1:5672//", 107 | "source": "ENV:TEST_STUFF_TESTGROUP__TEST_COMPLEX_RAW_STR", 108 | "path": ["testgroup", "test_complex_raw_str"], 109 | }, # noqa: E501 110 | ], 111 | id="json value", 112 | ), 113 | pytest.param( 114 | {"TEST_STUFF_TESTLIST__5": "v1"}, 115 | [ 116 | { 117 | "value": "v1", 118 | "source": "ENV:TEST_STUFF_TESTLIST__5", 119 | "path": ["testlist", 5], 120 | } 121 | ], 122 | id="list index variable", 123 | ), 124 | pytest.param( 125 | {"TEST_STUFF_TESTLIST__1__TEST": "v1"}, 126 | [ 127 | { 128 | "value": "v1", 129 | "source": "ENV:TEST_STUFF_TESTLIST__1__TEST", 130 | "path": ["testlist", 1, "test"], 131 | } 132 | ], 133 | id="list index with dict variable", 134 | ), 135 | ], 136 | ) 137 | def test_env_parser_iter_load(monkeypatch, environ, expected_kw): 138 | """Check that iter_load correctly interprets environment variables and their values.""" 139 | monkeypatch.setattr(os, "environ", environ) 140 | env_parser = EnvParser(prefix="TEST_STUFF") 141 | expected = [Fragment(**kw) for kw in expected_kw] 142 | result = list(env_parser.iter_load()) 143 | if sys.version_info[:2] >= (3, 6): # pragma: nocover 144 | assert result == expected 145 | else: # pragma: nocover 146 | # python 3.5 doesn't order dicts so we can't test the exact order 147 | def to_set(fragments): 148 | return {str(f) for f in fragments} 149 | 150 | assert to_set(result) == to_set(expected) 151 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | """Test settings.""" 2 | 3 | import itertools 4 | import json 5 | import os 6 | from collections import OrderedDict 7 | from copy import deepcopy 8 | from pathlib import Path 9 | from textwrap import dedent 10 | 11 | import pytest 12 | import tomli_w 13 | import yaml 14 | 15 | 16 | @pytest.fixture(autouse=True) 17 | def recover_directory(tmpdir): 18 | original_dir = Path(".").resolve() 19 | yield 20 | os.chdir(original_dir) 21 | 22 | 23 | @pytest.fixture(scope="session") 24 | def original_os_environ(): 25 | return deepcopy(os.environ) 26 | 27 | 28 | @pytest.fixture(autouse=True) 29 | def reset_os_environ(original_os_environ): 30 | yield 31 | os.environ.clear() 32 | os.environ.update(original_os_environ) 33 | 34 | 35 | @pytest.fixture 36 | def mock_os_environ(monkeypatch): 37 | """Mock os environment and set a few settings with environment variables.""" 38 | mock_environ = OrderedDict( 39 | [ 40 | ("test_stuff", 2), 41 | ("TEST_STUFF", 5), 42 | ("TEST_STUFF_TESTGROUP__TEST_VAR", 6), 43 | ("TEST_STUFF_TESTGROUP__TESTVAR", 7), 44 | ("TEST_STUFFTESTGROUP__TESTVAR", 8), 45 | ("TEST_STUFF_TESTGROUP_TEST_VAR", 9), 46 | ] 47 | ) 48 | monkeypatch.setattr(os, "environ", mock_environ) 49 | 50 | 51 | @pytest.fixture 52 | def mock_empty_os_environ(monkeypatch): 53 | """Mock os environment so it seems completely empty.""" 54 | mock_environ: dict = {} 55 | monkeypatch.setattr(os, "environ", mock_environ) 56 | 57 | 58 | @pytest.fixture( 59 | params=list(itertools.product([".toml", ".yml", ".yaml", ".json"], 
[False, True])) 60 | ) 61 | def file_extension(request, monkeypatch): 62 | """Fixture for providing file extension to use in settings files. 63 | 64 | This fixture is parametrized to mock out all "unneeded" modules to make 65 | sure that unneeded libraries do not need to be installed. 66 | 67 | """ 68 | ext, mock_other = request.param 69 | if mock_other: 70 | if ext == ".toml": 71 | monkeypatch.setattr("climatecontrol.file_loaders.yaml", None) 72 | elif ext in {".yml", ".yaml"}: 73 | monkeypatch.setattr("climatecontrol.file_loaders.tomli", None) 74 | else: 75 | monkeypatch.setattr("climatecontrol.file_loaders.yaml", None) 76 | monkeypatch.setattr("climatecontrol.file_loaders.tomli", None) 77 | return ext 78 | 79 | 80 | @pytest.fixture 81 | def mock_settings_file(request, monkeypatch, tmpdir, file_extension): 82 | """Temporarily write a settings file and return the filepath and the expected settings outcome.""" 83 | ext = file_extension 84 | p = tmpdir.mkdir("sub").join("settings" + ext) 85 | 86 | expected_result = {"testgroup": {"testvar": 123}, "othergroup": {"blabla": 555}} 87 | 88 | if ext == ".toml": 89 | p.write(tomli_w.dumps(expected_result)) 90 | elif ext in [".yml", ".yaml"]: 91 | p.write("---\n" + yaml.safe_dump(expected_result)) 92 | elif ext == ".json": 93 | p.write(json.dumps(expected_result)) 94 | else: # pragma: nocover 95 | raise NotImplementedError("Invalid file extension :{}.".format(ext)) 96 | 97 | return str(p), expected_result 98 | 99 | 100 | @pytest.fixture 101 | def mock_settings_files( 102 | request, monkeypatch, tmpdir, mock_settings_file, file_extension 103 | ): 104 | """Temporarily write multiple settings file and return the filepaths and the expected settings outcome.""" 105 | subdir = tmpdir.mkdir("sub2") 106 | ext = file_extension 107 | 108 | # File to load in settings file by adding "from_file" to the variable we want. 
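    # ("_from_file" keys are resolved by climatecontrol: the file at the given
    # path is read and its contents are stored under the key without the
    # suffix, so "testvar_inline_1_from_file" pointing at this file ends up as
    # testgroup.testvar_inline_1 == "foo" in expected_result below.)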
109 | inline_path = subdir.join("secret.txt") 110 | inline_path.write("foo") 111 | 112 | if ext == ".toml": 113 | s1 = dedent( 114 | """\ 115 | [testgroup] 116 | testvar = 123 117 | testvar_inline_1_from_file = "{}" 118 | 119 | [othergroup] 120 | blabla = 55 121 | testvar_inline_2_from_file = "{}" 122 | """.format( 123 | str(inline_path), str(inline_path) 124 | ) 125 | ) 126 | 127 | s2 = dedent( 128 | """\ 129 | 130 | [othergroup] 131 | blabla = 555 132 | testvar_inline_2 = "bar" 133 | """ 134 | ) 135 | elif ext in (".yml", ".yaml"): 136 | s1 = dedent( 137 | """\ 138 | testgroup: 139 | testvar: 123 140 | testvar_inline_1_from_file: {} 141 | 142 | othergroup: 143 | blabla: 55 144 | testvar_inline_2_from_file: {} 145 | """.format( 146 | str(inline_path), str(inline_path) 147 | ) 148 | ) 149 | 150 | s2 = dedent( 151 | """\ 152 | 153 | othergroup: 154 | blabla: 555 155 | testvar_inline_2: bar 156 | """ 157 | ) 158 | elif ext == ".json": 159 | s1 = ( 160 | dedent( 161 | """\ 162 | { 163 | "testgroup": {"testvar": 123, "testvar_inline_1_from_file": "%s"}, 164 | "othergroup": {"blabla": 55, "testvar_inline_2_from_file": "%s"} 165 | } 166 | """ 167 | ) 168 | % (str(inline_path), str(inline_path)) 169 | ) 170 | 171 | s2 = dedent( 172 | """\ 173 | {"othergroup": {"blabla": 555, "testvar_inline_2": "bar"}} 174 | """ 175 | ) 176 | else: # pragma: nocover 177 | raise NotImplementedError("Invalid file extension :{}.".format(ext)) 178 | p1 = subdir.join("settings" + ext) 179 | p1.write(s1) 180 | p2 = subdir.join("settings2" + ext) 181 | p2.write(s2) 182 | 183 | expected_result = { 184 | "testgroup": {"testvar": 123, "testvar_inline_1": "foo"}, 185 | "othergroup": { 186 | "blabla": 555, # Overridden by file 2 187 | "testvar_inline_2": "bar", # Overridden by file 2 188 | }, 189 | } 190 | return [str(p1), str(p2)], expected_result 191 | 192 | 193 | @pytest.fixture 194 | def mock_env_settings_file(mock_os_environ, mock_settings_file): 195 | """Set the settings file env variable to a temporary settings file.""" 196 | os.environ["TEST_STUFF_SETTINGS_FILE"] = mock_settings_file[0] 197 | return mock_settings_file 198 | -------------------------------------------------------------------------------- /climatecontrol/file_loaders.py: -------------------------------------------------------------------------------- 1 | """Module for loading various file formats.""" 2 | 3 | import glob 4 | import json 5 | import os 6 | from abc import ABC, abstractmethod 7 | from pathlib import Path 8 | from typing import Any, Dict, Iterator, List, Tuple, Union 9 | 10 | from .exceptions import NoCompatibleLoaderFoundError 11 | from .fragment import Fragment 12 | 13 | try: 14 | import tomli 15 | except ImportError: # pragma: nocover 16 | tomli = None # type: ignore 17 | try: 18 | import yaml 19 | except ImportError: # pragma: nocover 20 | yaml = None # type: ignore 21 | 22 | 23 | def iter_load(path: Union[str, Path]) -> Iterator[Fragment]: 24 | """Read settings file from a filepath or from a string representing the file contents. 25 | 26 | If ``path`` is a valid filename or glob expression, load the 27 | file (or all matching files). 28 | 29 | Note that json, yaml and toml files are read. 30 | 31 | Args: 32 | path: Path to file or file contents 33 | 34 | Raises: 35 | FileLoadError: when an error occurs during the loading of a file. 36 | NoCompatibleLoaderFoundError: when no compatible loader was found for 37 | this filepath or content type. 
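
    Example (illustrative usage; the glob pattern below is a placeholder):
        >>> fragments = list(iter_load("~/myapp/settings*.yaml"))  # doctest: +SKIP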
38 | 39 | """ 40 | if not path: 41 | return 42 | expanded_path: str = os.path.expanduser(os.path.expandvars(path)) 43 | if glob.has_magic(expanded_path): 44 | filepaths: List[str] = sorted(glob.glob(expanded_path)) 45 | else: 46 | filepaths = [expanded_path] 47 | for filepath in filepaths: 48 | yield Fragment(value=load_from_filepath(filepath), source=filepath) 49 | 50 | 51 | def load_from_filepath(filepath: str) -> Dict[str, Any]: 52 | """Read settings file from a filepath or from a string representing the file contents. 53 | 54 | Args: 55 | filepath: Path to file or file contents 56 | 57 | Raises: 58 | FileLoadError: when an error occurs during the loading of a file. 59 | ContentLoadError: when an error occurs during the loading of file contents. 60 | NoCompatibleLoaderFoundError: when no compatible loader was found for 61 | this filepath or content type. 62 | 63 | """ 64 | file_data: dict = {} 65 | if not filepath: 66 | return file_data 67 | for loader in FileLoader.registered_loaders: 68 | if loader.is_path(filepath): 69 | file_data = loader.from_path(filepath) 70 | break 71 | else: 72 | raise NoCompatibleLoaderFoundError( 73 | "Failed to load settings from filepath. " 74 | "No compatible loader for file: {}".format(filepath) 75 | ) 76 | return file_data 77 | 78 | 79 | class FileLoader(ABC): 80 | """Abstract base class for file/file content loading.""" 81 | 82 | format_name: str = "" 83 | valid_file_extensions: Tuple[str, ...] = () 84 | registered_loaders: List["FileLoader"] = [] 85 | 86 | @classmethod 87 | @abstractmethod 88 | def from_path(cls, path: str) -> Any: 89 | """Load serialized data from file at path.""" 90 | 91 | @classmethod 92 | @abstractmethod 93 | def from_content(cls, content: str) -> Any: 94 | """Load serialized data from content.""" 95 | 96 | @classmethod 97 | def is_path(cls, path_or_content: str): 98 | """Check if argument is a valid file path. 99 | 100 | If `only_existing` is set to ``True``, paths to files that don't exist 101 | will also return ``False``. 
102 | """ 103 | return len(str(path_or_content).strip().splitlines()) == 1 and ( 104 | os.path.splitext(path_or_content)[1] in cls.valid_file_extensions 105 | ) 106 | 107 | @classmethod 108 | def register(cls, class_to_register): 109 | """Register class as a valid file loader.""" 110 | cls.registered_loaders.append(class_to_register) 111 | return class_to_register 112 | 113 | 114 | @FileLoader.register 115 | class JsonLoader(FileLoader): 116 | """FileLoader for .json files.""" 117 | 118 | format_name = "json" 119 | valid_file_extensions = (".json",) 120 | 121 | @classmethod 122 | def from_content(cls, content: str) -> Any: 123 | """Load json from string.""" 124 | return json.loads(content) 125 | 126 | @classmethod 127 | def from_path(cls, path: str): 128 | """Load json from file at path.""" 129 | with open(path) as f: 130 | return json.load(f) 131 | 132 | @classmethod 133 | def to_content(cls, data) -> str: 134 | """Serialize mapping to string.""" 135 | return json.dumps(data, indent=4) 136 | 137 | 138 | @FileLoader.register 139 | class YamlLoader(FileLoader): 140 | """FileLoader for .yaml files.""" 141 | 142 | format_name = "yaml" 143 | valid_file_extensions = (".yml", ".yaml") 144 | 145 | @classmethod 146 | def from_content(cls, content: str) -> Any: 147 | """Load data from yaml formatted string.""" 148 | cls._check_yaml() 149 | return yaml.safe_load(content) 150 | 151 | @classmethod 152 | def from_path(cls, path: str) -> Any: 153 | """Load data from path containing a yaml file.""" 154 | cls._check_yaml() 155 | with open(path) as f: 156 | return yaml.safe_load(f) 157 | 158 | @staticmethod 159 | def _check_yaml(): 160 | if yaml is None: 161 | raise ImportError( 162 | '"pyyaml" package needs to be installed to parse yaml files.' 163 | ) 164 | 165 | 166 | @FileLoader.register 167 | class TomlLoader(FileLoader): 168 | """FileLoader for .toml files.""" 169 | 170 | format_name = "toml" 171 | valid_file_extensions = (".toml", ".ini", ".config", ".conf", ".cfg") 172 | 173 | @classmethod 174 | def from_content(cls, content: str) -> Any: 175 | """Load toml from string.""" 176 | cls._check_toml() 177 | return tomli.loads(content) 178 | 179 | @classmethod 180 | def from_path(cls, path: str): 181 | """Load toml from file at path.""" 182 | cls._check_toml() 183 | with open(path, "rb") as f: 184 | return tomli.load(f) 185 | 186 | @staticmethod 187 | def _check_toml(): 188 | if tomli is None: 189 | raise ImportError( 190 | '"toml" package needs to be installed to parse toml files.' 191 | ) 192 | -------------------------------------------------------------------------------- /climatecontrol/env_parser.py: -------------------------------------------------------------------------------- 1 | """Environment variable parser.""" 2 | 3 | import logging 4 | import os 5 | from typing import Iterable, Iterator, NamedTuple, Tuple 6 | 7 | from . import file_loaders 8 | from .fragment import Fragment 9 | from .utils import int_if_digit, parse_as_json_if_possible 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | EnvSetting = NamedTuple("EnvSetting", [("name", str), ("value", Fragment)]) 14 | 15 | 16 | class EnvParser: 17 | r"""Environment variable parser. 18 | 19 | Args: 20 | prefix: Only environment variables which start with this string 21 | (case insensitive) are considered. 22 | split_char: Character to split variables at. Note that if prefix 23 | is given, the variable name must also be seperated from the base 24 | with this character. 
25 | settings_file_suffix: Suffix to identify an environment variable as a 26 | settings file. 27 | 28 | >>> env_parser=EnvParser(prefix='A', settings_file_suffix='SF') 29 | >>> env_parser.settings_file_env_var 30 | 'A_SF' 31 | 32 | exclude: Environment variables to exclude. Note that the settings file 33 | constructed from ``settings_file_suffix`` is excluded in any case. 34 | 35 | Attributes: 36 | settings_file_env_var: Name of the settings file environment variable. 37 | Is constructed automatically. 38 | 39 | Examples: 40 | >>> os.chdir(getfixture('tmpdir')) # noqa # only for test: don't clobber current directory 41 | >>> 42 | >>> env_parser = EnvParser(prefix='THIS_EXAMPLE') 43 | >>> 44 | >>> _ = open('settings.toml', 'w').write('[testgroup]\nother_var = 345') 45 | >>> 46 | >>> os.environ['THIS_EXAMPLE_TESTGROUP_TESTVAR'] = '27' 47 | >>> os.environ['THIS_EXAMPLE_SETTINGS_FILE'] = './settings.toml' 48 | >>> 49 | >>> fragments = list(env_parser.iter_load()) 50 | >>> fragments 51 | [Fragment(value={'testgroup': {'other_var': 345}}, source='ENV:THIS_EXAMPLE_SETTINGS_FILE:./settings.toml', path=FragmentPath([])), Fragment(value=27, source='ENV:THIS_EXAMPLE_TESTGROUP_TESTVAR', path=FragmentPath(['testgroup_testvar']))] 52 | 53 | """ 54 | 55 | def __init__( 56 | self, 57 | prefix: str = "CLIMATECONTROL", 58 | split_char: str = "_", 59 | settings_file_suffix: str = "SETTINGS_FILE", 60 | exclude: Iterable[str] = (), 61 | ) -> None: 62 | """Initialize object.""" 63 | self.settings_file_suffix = str(settings_file_suffix) 64 | self.split_char = split_char 65 | self.prefix = prefix 66 | self.exclude = exclude # type: ignore 67 | 68 | @property 69 | def exclude(self) -> Tuple[str, ...]: 70 | """Return excluded environment variables.""" 71 | exclude = self._exclude.union({self.settings_file_env_var}) 72 | return tuple(set(s.lower() for s in exclude)) 73 | 74 | @exclude.setter 75 | def exclude(self, exclude: Iterable[str] = ()) -> None: 76 | """Set excluded environment variables.""" 77 | self._exclude = set(exclude) 78 | 79 | @property 80 | def prefix(self) -> str: 81 | """Return prefix used to filter used environment variables.""" 82 | return self._build_env_var(self._prefix) + self.split_char 83 | 84 | @prefix.setter 85 | def prefix(self, value: str): 86 | """Set prefix used to filter used environment variables.""" 87 | self._prefix = str(value) 88 | 89 | @property 90 | def settings_file_env_var(self) -> str: 91 | """Return environment variable used to indicate a path to a settings file.""" 92 | return self._build_env_var(self.prefix, self.settings_file_suffix) 93 | 94 | @settings_file_env_var.setter 95 | def settings_file_env_var(self, value: str): 96 | """Set environment variable used to indicate a path to a settings file.""" 97 | raise AttributeError( 98 | "Can't set `settings_file_env_var` directly. Set `settings_file_suffix` instead." 99 | ) 100 | 101 | @property 102 | def split_char(self) -> str: 103 | """Return character used to split sections.""" 104 | return self._split_char 105 | 106 | @split_char.setter 107 | def split_char(self, char: str) -> None: 108 | """Set character used to split sections.""" 109 | char = str(char) 110 | if len(char) != 1: 111 | raise ValueError("``split_char`` must be a single character") 112 | self._split_char = str(char) 113 | 114 | def iter_load(self) -> Iterator[Fragment]: 115 | """Convert environment variables to fragments. 116 | 117 | Note that all string inputs are case insensitive and all resulting keys 118 | are lower case. 
119 | 120 | Yields: 121 | Fragment representing a single environment variable value. 122 | 123 | """ 124 | settings_file_str = os.getenv(self.settings_file_env_var, "") 125 | settings_files = [s.strip() for s in settings_file_str.split(",")] 126 | for settings_file in settings_files: 127 | for fragment in file_loaders.iter_load(settings_file): 128 | fragment.source = ( 129 | "ENV:" + str(self.settings_file_env_var) + ":" + fragment.source 130 | ) 131 | yield fragment 132 | for env_var, env_var_value in os.environ.items(): 133 | nested_keys = list(self._iter_nested_keys(env_var)) 134 | if not nested_keys: 135 | continue 136 | value = parse_as_json_if_possible(env_var_value) 137 | fragment = Fragment(value=value, path=nested_keys, source="ENV:" + env_var) 138 | yield fragment 139 | 140 | def _build_env_var(self, *parts: str) -> str: 141 | return self.split_char.join(self._strip_split_char(p).upper() for p in parts) 142 | 143 | def _iter_nested_keys(self, env_var: str) -> Iterator[str]: 144 | """Iterate over nested keys of an environment variable name. 145 | 146 | Yields: 147 | String representing each nested key. 148 | 149 | """ 150 | env_var_low = env_var.lower() 151 | if env_var_low in self.exclude or not env_var_low.startswith( 152 | self.prefix.lower() 153 | ): 154 | return 155 | body = env_var_low[len(self.prefix) :] 156 | sections = body.split(self.split_char * 2) 157 | for i_section, section in enumerate(sections): 158 | if section: 159 | yield int_if_digit(section) 160 | 161 | def _strip_split_char(self, s): 162 | if s.startswith(self.split_char): 163 | s = s[len(self.split_char) :] 164 | elif s.endswith(self.split_char): 165 | s = s[: -len(self.split_char)] 166 | return s 167 | -------------------------------------------------------------------------------- /tests/test_fragment.py: -------------------------------------------------------------------------------- 1 | """Tests for fragments.""" 2 | 3 | import sys 4 | 5 | import pytest 6 | 7 | from climatecontrol.fragment import EMPTY, Fragment, FragmentPath, merge_nested 8 | 9 | 10 | def test_fragment_path(): 11 | """Test fragment path construction.""" 12 | assert not FragmentPath() # empty path is falsy 13 | assert FragmentPath([]) == FragmentPath(), "empty fragment paths should be equal" 14 | assert FragmentPath(["a", "stuff", 1]) == FragmentPath( 15 | ["a", "stuff", 1] 16 | ), "Unexpected equality result" 17 | assert FragmentPath(["a", "stuff", 1]) != FragmentPath( 18 | ["a", "wrong", 1] 19 | ), "Unexpected inequality result" 20 | assert list(FragmentPath(["a", "stuff", 1])) == [ 21 | "a", 22 | "stuff", 23 | 1, 24 | ], "Unexpected list conversion result" 25 | assert ( 26 | str(FragmentPath(["a", "stuff", 1])) == "FragmentPath(['a', 'stuff', 1])" 27 | ), "Unexpected string representation" 28 | assert FragmentPath(["a", "stuff", 1])[1] == "stuff", "unexpected indexing" 29 | 30 | 31 | @pytest.mark.parametrize( 32 | "path, expected", 33 | [ 34 | (["a"], {"a": "test"}), 35 | (["a", "stuff", 1, "bla"], {"a": {"stuff": [EMPTY, {"bla": "test"}]}}), 36 | ([2, "a"], [EMPTY, EMPTY, {"a": "test"}]), 37 | ([2, 1], [EMPTY, EMPTY, [EMPTY, "test"]]), 38 | ], 39 | ) 40 | def test_fragment_path_expand(path, expected): 41 | """Test expansion of fragment path.""" 42 | assert FragmentPath(path).expand("test") == expected 43 | 44 | 45 | @pytest.mark.parametrize( 46 | "a,b,expected", 47 | [ 48 | (["a"], ["b"], []), 49 | ([], [], []), 50 | ([1, 2, 3], [1, 2, 4, 5], [1, 2]), 51 | (["a", "b", "c"], ["a", "b"], ["a", "b"]), 52 | (["a", "b", "c"], ["a", "b", 
"d"], ["a", "b"]), 53 | (["a", "b", "c"], ["wrong", "b", "c"], []), 54 | ], 55 | ) 56 | def test_fragment_path_common(a, b, expected): 57 | """Test common path.""" 58 | assert FragmentPath(a).common(b) == FragmentPath(expected) 59 | 60 | 61 | def test_fragment(): 62 | """Test fragment constructor and representation.""" 63 | fragment = Fragment(value="bla", source="test", path=["a", "b"]) 64 | assert fragment 65 | assert ( 66 | str(fragment) 67 | == "Fragment(value='bla', source='test', path=FragmentPath(['a', 'b']))" 68 | ), "unexpected string representation" 69 | 70 | 71 | def test_fragment_equality(): 72 | """Test fragment equality.""" 73 | assert Fragment(value="bla", source="test", path=["a", "b"]) == Fragment( 74 | value="bla", source="test", path=["a", "b"] 75 | ) 76 | assert Fragment(value="bla") == Fragment(value="bla") 77 | assert Fragment(value="bla") != Fragment(value="blub") 78 | assert Fragment(value="bla", source="a") != Fragment(value="bla", source="b") 79 | assert Fragment(value="bla", path=["a"]) != Fragment(value="bla", path=["b"]) 80 | 81 | 82 | def test_fragment_clone(): 83 | """Test the fragments clone method.""" 84 | fragment = Fragment("bla") 85 | assert fragment.clone() == fragment 86 | assert fragment.clone() is not fragment 87 | cloned = fragment.clone(source="foobar") 88 | assert cloned != fragment 89 | assert cloned == Fragment("bla", source="foobar") 90 | 91 | 92 | def test_fragment_iter_leaves(): 93 | """Test that iterating over fragment leaves gives expected result.""" 94 | fragment = Fragment("bla") 95 | assert list(Fragment("bla").iter_leaves()) == [fragment] 96 | actual = list( 97 | Fragment( 98 | {"a": 4, "b": "test", "c": {"d": [2, {"e": None, "f": "test2"}]}} 99 | ).iter_leaves() 100 | ) 101 | 102 | expected = [ 103 | Fragment(value=4, path=["a"]), 104 | Fragment(value="test", path=["b"]), 105 | Fragment(value=2, path=["c", "d", 0]), 106 | Fragment(value=None, path=["c", "d", 1, "e"]), 107 | Fragment(value="test2", path=["c", "d", 1, "f"]), 108 | ] 109 | 110 | if sys.version_info[:2] >= (3, 6): # pragma: nocover 111 | assert actual == expected 112 | else: # pragma: nocover 113 | 114 | def to_set(fragment_list): 115 | return set((item.value, tuple(item.path)) for item in fragment_list) 116 | 117 | assert to_set(actual) == to_set(expected) 118 | 119 | 120 | def test_expand_value_with_path(): 121 | """Test expanding fragment value with path.""" 122 | actual = Fragment("bla", path=["a", "b"]).expand_value_with_path() 123 | expected = {"a": {"b": "bla"}} 124 | assert actual == expected 125 | 126 | 127 | @pytest.mark.parametrize( 128 | "a, b, expected", 129 | [ 130 | pytest.param({"a": 5}, {"b": 6}, {"a": 5, "b": 6}, id="simple dict update"), 131 | pytest.param( 132 | {"a": 5}, 133 | {"b": 6, "a": 4}, 134 | {"a": 4, "b": 6}, 135 | id="simple dict update with overwrite", 136 | ), 137 | pytest.param( 138 | {"a": {"b": 1, "c": 2}}, 139 | {"d": 3, "a": {"b": "new"}}, 140 | {"a": {"b": "new", "c": 2}, "d": 3}, 141 | id="nested dict update", 142 | ), 143 | pytest.param([1, 2, 3], [1, 4], [1, 4, 3], id="list update"), 144 | pytest.param( 145 | {"a": [1, {"b": 2}, 3]}, 146 | {"a": [EMPTY, 2]}, 147 | {"a": [1, 2, 3]}, 148 | id="nested dict list update", 149 | ), 150 | pytest.param( 151 | {"a": [1, {"b": 2}, 3]}, 152 | {"a": [EMPTY, {"c": 4}]}, 153 | {"a": [1, {"b": 2, "c": 4}, 3]}, 154 | id="nested dict list update", 155 | ), 156 | pytest.param(1, 2, 2, id="simple object overwrite"), 157 | pytest.param([1], 2, 2, id="simple object overwrite"), 158 | ], 159 | ) 160 | 
def test_merge_nested(a, b, expected): 161 | """Testing nested merge.""" 162 | assert merge_nested(a, b) == expected 163 | 164 | 165 | @pytest.mark.parametrize( 166 | "a_kw, b_kw, expected_kw", 167 | [ 168 | ({"value": 4, "path": ["a"]}, {"value": {"a": 5}}, {"value": {"a": 5}}), 169 | ( 170 | {"value": {"a": 4, "b": ["c", "d", "e"]}, "path": ["root"]}, 171 | {"value": {"bla": 2}, "path": ["root", "b", 1]}, 172 | {"value": {"a": 4, "b": ["c", {"bla": 2}, "e"]}, "path": ["root"]}, 173 | ), 174 | ( 175 | {"value": {"a": 4, "b": {"c": "d"}}, "path": ["root"]}, 176 | {"value": {"b": 2}, "path": ["root"]}, 177 | {"value": {"a": 4, "b": 2}, "path": ["root"]}, 178 | ), 179 | ({"value": 3}, {"value": 5}, {"value": 5}), 180 | ], 181 | ) 182 | def test_fragment_merge(a_kw, b_kw, expected_kw): 183 | """Test the apply method of the fragment class.""" 184 | actual = Fragment(**a_kw).merge(Fragment(**b_kw)) 185 | expected = Fragment(**expected_kw) 186 | assert actual == expected 187 | -------------------------------------------------------------------------------- /climatecontrol/fragment.py: -------------------------------------------------------------------------------- 1 | """Module for defining settings fragments.""" 2 | 3 | from contextlib import suppress 4 | from itertools import zip_longest 5 | from typing import ( 6 | Any, 7 | Generic, 8 | Iterable, 9 | Iterator, 10 | Mapping, 11 | Sequence, 12 | Type, 13 | TypeVar, 14 | Union, 15 | ) 16 | 17 | from .utils import EMPTY, get_nested, merge_nested 18 | 19 | T = TypeVar("T") 20 | FV = TypeVar("FV") 21 | FP = TypeVar("FP", bound="FragmentPath") 22 | F = TypeVar("F", bound="Fragment") 23 | 24 | 25 | class FragmentPath(Sequence): 26 | """Path indicating nested levels of a fragment value.""" 27 | 28 | def __init__(self, iterable: Iterable = ()) -> None: 29 | """Assign initial iterable data.""" 30 | self._data: list = list(iterable) 31 | 32 | @classmethod 33 | def from_spec(cls: Type[FP], spec: Union[str, int, Sequence]) -> FP: 34 | """Construct fragment path from complex spec. 35 | 36 | Examples: 37 | >>> FragmentPath.from_spec('a.b.0.c') 38 | FragmentPath(['a', 'b', 0, 'c']) 39 | >>> FragmentPath([1]) 40 | FragmentPath([1]) 41 | >>> FragmentPath(['a', 'b']) 42 | FragmentPath(['a', 'b']) 43 | 44 | """ 45 | return cls(cls._iter_spec(spec)) 46 | 47 | @staticmethod 48 | def _iter_spec(spec: Any) -> Iterator: 49 | try: 50 | spec_iter = spec.split(".") 51 | except AttributeError: 52 | try: 53 | spec_iter = iter(spec) 54 | except TypeError: 55 | yield spec 56 | return 57 | 58 | for item in spec_iter: 59 | with suppress(AttributeError): 60 | if item.isdigit(): 61 | item = int(item) 62 | yield item 63 | 64 | def __len__(self) -> int: 65 | return len(self._data) 66 | 67 | def __iter__(self) -> Iterator: 68 | yield from self._data 69 | 70 | def __getitem__(self, index) -> Any: 71 | return self._data[index] 72 | 73 | def __repr__(self) -> str: 74 | return "{}({})".format(type(self).__qualname__, repr(self._data)) 75 | 76 | def __str__(self) -> str: 77 | return f"{type(self).__qualname__}({self._data})" 78 | 79 | def __eq__(self, other) -> bool: 80 | return type(self) == type(other) and self._data == other._data 81 | 82 | def expand(self, value: Any = None) -> Any: 83 | """Expand path to object. 84 | 85 | Depending on each entry of the path a dictionary or list is created. 86 | Entries that are not defined are will with the :data:`EMPTY` object. 
87 | 88 | Example: 89 | >>> FragmentPath(['a', 1, 'b']).expand() 90 | {'a': [, {'b': None}]} 91 | 92 | """ 93 | if not self._data: 94 | return value 95 | if self._data and self._is_list_index(self._data[0]): 96 | new_value: Union[dict, list] = [EMPTY] * (self._data[0] + 1) 97 | else: 98 | new_value = {} 99 | sub_value = new_value 100 | for subpath, next_subpath in zip_longest(self._data[:-1], self._data[1:]): 101 | if self._is_list_index(next_subpath): 102 | sub_value[subpath] = [EMPTY] * (next_subpath + 1) 103 | else: 104 | sub_value[subpath] = {} 105 | sub_value = sub_value[subpath] 106 | 107 | # last path value holds the actual value 108 | sub_value[self._data[-1]] = value 109 | 110 | return new_value 111 | 112 | def common(self: FP, other: Sequence) -> FP: 113 | """Given a second path, return the part of the sequence up to the point where they first differ.""" 114 | common_path = [] 115 | other_path: FragmentPath = type(self)(other) 116 | for subpath, subpath_other in zip(self._data, other_path): 117 | if subpath == subpath_other: 118 | common_path.append(subpath) 119 | else: 120 | break 121 | return type(self)(common_path) 122 | 123 | @classmethod 124 | def _is_list_index(cls, index) -> bool: 125 | """Check if index is a list index.""" 126 | return isinstance(index, int) 127 | 128 | 129 | class Fragment(Generic[FV]): 130 | """Data fragment for storing a value and metadata related to it.""" 131 | 132 | path: FragmentPath 133 | 134 | def __init__(self, value: FV, source: str = "", path: Sequence = ()) -> None: 135 | """Initialize fragment.""" 136 | self.value = value 137 | self.source = source 138 | self.path = FragmentPath(path) 139 | 140 | def __repr__(self) -> str: 141 | return "{}(value={}, source={}, path={})".format( 142 | type(self).__qualname__, 143 | repr(self.value), 144 | repr(self.source), 145 | repr(self.path), 146 | ) 147 | 148 | def __eq__(self, other) -> bool: 149 | return ( 150 | type(self) == type(other) 151 | and self.value == other.value 152 | and self.source == other.source 153 | and self.path == other.path 154 | ) 155 | 156 | def iter_leaves(self: F) -> Iterator[F]: 157 | """Iterate over all leaves of a fragment. 158 | 159 | A leaf is obtained by walking through any nested dictionaries until a 160 | non-dictionary value is found. 
161 | 162 | """ 163 | if isinstance(self.value, Mapping): 164 | items: Iterable[tuple] = self.value.items() 165 | elif isinstance(self.value, Sequence) and not isinstance(self.value, str): 166 | items = enumerate(self.value) 167 | else: 168 | # Can't obtain any items so just assume this is a leaf 169 | yield self 170 | return 171 | 172 | for k, v in items: 173 | yield from self.clone(value=v, path=list(self.path) + [k]).iter_leaves() 174 | 175 | def expand_value_with_path(self) -> Any: 176 | """Create expanded dictionary where the fragments path acts as nested keys.""" 177 | return self.path.expand(self.value) 178 | 179 | def merge(self: F, other: "Fragment") -> F: 180 | """Merge with another fragment.""" 181 | expanded_value = self.expand_value_with_path() 182 | other_expanded_value = other.expand_value_with_path() 183 | merged_value = merge_nested(expanded_value, other_expanded_value) 184 | 185 | new_path = self.path.common(other.path) 186 | new_value = get_nested(merged_value, new_path) 187 | new_source = ", ".join(str(s) for s in [self.source, other.source] if s) 188 | 189 | return self.clone(value=new_value, source=new_source, path=new_path) 190 | 191 | def clone(self: F, **kwargs) -> F: 192 | """Clone fragment but using ``kwargs`` as alternative constructor arguments.""" 193 | defaults = {"value": self.value, "source": self.source, "path": self.path} 194 | updated_kwargs = {**defaults, **kwargs} 195 | return type(self)(**updated_kwargs) 196 | -------------------------------------------------------------------------------- /climatecontrol/processors.py: -------------------------------------------------------------------------------- 1 | """Fragment processors.""" 2 | 3 | import glob 4 | import logging 5 | import os 6 | from typing import Any, Callable, Iterable, Iterator, Mapping, Sequence, Tuple, Type 7 | 8 | from climatecontrol.constants import REMOVED 9 | from climatecontrol.file_loaders import ( 10 | FileLoader, 11 | NoCompatibleLoaderFoundError, 12 | iter_load, 13 | load_from_filepath, 14 | ) 15 | from climatecontrol.fragment import Fragment, FragmentPath 16 | from climatecontrol.utils import parse_as_json_if_possible 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | 21 | def find_suffix(fragment: Fragment, suffix: str) -> Iterator[Fragment]: 22 | value = fragment.value 23 | if isinstance(value, Mapping): 24 | items: Iterable[tuple] = value.items() 25 | elif isinstance(value, Sequence) and not isinstance(value, str): 26 | items = enumerate(value) 27 | else: 28 | return 29 | 30 | for k, v in items: 31 | new = fragment.clone(value=v, path=list(fragment.path) + [k]) 32 | if isinstance(k, str) and k.endswith(suffix): 33 | yield new 34 | else: 35 | yield from find_suffix(new, suffix) 36 | 37 | 38 | def replace_from_pattern( 39 | fragment: Fragment, 40 | postfix_trigger: str, 41 | transform_value: Callable[[Any, FragmentPath], Any], 42 | expected_exceptions: Tuple[Type[Exception], ...] = (), 43 | ): 44 | """Replace settings values using a given value transformation. 45 | 46 | Args: 47 | fragment: original fragment to search 48 | postfix_trigger: String at end of key that should trigger the transformation 49 | transform_value: Function to use to transform the value. The function should take two arguments: 50 | * value: the value to transform 51 | * path: the fragment path at which the value was found. 52 | exected_exceptions: Tuple of exceptions to ignore if they are 53 | raised. 
In this case the original key and it's value that 54 | triggered the transformation is removed, and is not replaced 55 | with a new value. 56 | 57 | Yields: 58 | Additional fragments to patch the original fragment. 59 | 60 | """ 61 | for leaf in find_suffix(fragment, postfix_trigger): 62 | path = leaf.path 63 | value = leaf.value 64 | 65 | if not path or value == REMOVED: 66 | continue 67 | 68 | key = path[-1] 69 | 70 | yield leaf.clone(value=REMOVED, path=path) 71 | 72 | try: 73 | # This allows "transform_value" to be a generator function as well. 74 | new_value = transform_value(value, path) 75 | if isinstance(new_value, Iterator): 76 | items: list = list(new_value) 77 | else: 78 | items = [new_value] 79 | except expected_exceptions: 80 | continue 81 | 82 | new_key = key[: -len(postfix_trigger)] 83 | new_path = list(path[:-1]) 84 | if new_key: 85 | new_path += [new_key] 86 | 87 | for item in items: 88 | if isinstance(item, Fragment): 89 | kwargs = {} 90 | if item.source: 91 | kwargs["source"] = leaf.source + f":{item.source}" 92 | yield leaf.clone(value=item.value, path=new_path, **kwargs) 93 | else: 94 | yield leaf.clone(value=item, path=new_path) 95 | 96 | 97 | def replace_from_env_vars( 98 | fragment: Fragment, postfix_trigger: str = "_from_env" 99 | ) -> Iterator[Fragment]: 100 | """Read and replace settings values from environment variables. 101 | 102 | Args: 103 | fragment: Fragment to process 104 | postfix_trigger: Optionally configurable string to trigger a 105 | replacement with an environment variable. If a key is found which 106 | ends with this string, the value is assumed to be the name of an 107 | environemtn variable and the settings value will be set to the 108 | contents of that variable. 109 | 110 | Yields: 111 | Additional fragments to patch the original fragment. 112 | 113 | """ 114 | 115 | class ExpectedTransformError(Exception): 116 | pass 117 | 118 | def transform_value(value, path): 119 | if not isinstance(value, str): 120 | raise ValueError( 121 | f"{postfix_trigger} replacement expects a string a a variable." 122 | ) 123 | if "$" in value: 124 | env_var_value = os.path.expandvars(value) 125 | else: 126 | try: 127 | env_var_value = os.environ[value] 128 | except KeyError as e: 129 | logger.info( 130 | "Error while trying to load environment variable: %s from %s. (%s) Skipping...", 131 | value, 132 | ".".join(str(p) for p in path), 133 | e, 134 | ) 135 | raise ExpectedTransformError() 136 | return parse_as_json_if_possible(env_var_value) 137 | 138 | yield from replace_from_pattern( 139 | fragment, postfix_trigger, transform_value, (ExpectedTransformError,) 140 | ) 141 | 142 | 143 | def replace_from_file_vars( 144 | fragment: Fragment, postfix_trigger: str = "_from_file" 145 | ) -> Iterator[Fragment]: 146 | """Read and replace settings values from content local files. 147 | 148 | Args: 149 | fragment: Fragment to process 150 | postfix_trigger: Optionally configurable string to trigger a local 151 | file value. If a key is found which ends with this string, the 152 | value is assumed to be a file path and the settings value will 153 | be set to the content of the file. 154 | 155 | Yields: 156 | Additional fragments to patch the original fragment. 157 | 158 | """ 159 | 160 | def transform_value(value: Any, path: FragmentPath) -> Any: 161 | if isinstance(value, list): 162 | # if we get a list, process each item one after another. 
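# Note (added comment): each list item is passed back through transform_value, so a list may mix glob patterns, paths to supported settings files, and plain text files.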
163 | for item in value: 164 | yield from transform_value(item, path) 165 | return 166 | 167 | if not isinstance(value, str): 168 | raise ValueError("file path must be string") 169 | 170 | try: 171 | if glob.has_magic(value): 172 | yield from iter_load(value) 173 | return 174 | try: 175 | yield load_from_filepath(value) 176 | return 177 | except NoCompatibleLoaderFoundError: 178 | # just load as plain text file and interpret as string 179 | with open(value) as f: 180 | yield f.read().strip() 181 | return 182 | except FileNotFoundError as e: 183 | logger.info( 184 | "Error while trying to load variable from file: %s. (%s) Skipping...", 185 | value, 186 | ".".join(str(p) for p in path), 187 | e, 188 | ) 189 | 190 | yield from replace_from_pattern(fragment, postfix_trigger, transform_value) 191 | 192 | 193 | def replace_from_content_vars(fragment: Fragment) -> Iterator[Fragment]: 194 | """Read and replace settings values from content local files. 195 | 196 | Args: 197 | fragment: Fragment to process 198 | 199 | Yields: 200 | Additional fragments to patch the original fragment. 201 | 202 | """ 203 | 204 | file_loader_map = { 205 | ext.strip("."): loader 206 | for loader in FileLoader.registered_loaders 207 | for ext in loader.valid_file_extensions 208 | } 209 | 210 | for format_name, loader in file_loader_map.items(): 211 | postfix_trigger = f"_from_{format_name}_content" 212 | 213 | def transform_value(value, path: FragmentPath): 214 | try: 215 | return loader.from_content(value) 216 | except Exception: 217 | path_str = ".".join(str(p) for p in path) 218 | logger.info( 219 | "Error while trying to load %s content at %s.", 220 | format_name, 221 | path_str, 222 | ) 223 | raise 224 | 225 | yield from replace_from_pattern( 226 | fragment, postfix_trigger, transform_value, (Exception,) 227 | ) 228 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | |Build Status| |Coverage Status| |PyPi version| |PyPI license| |PyPI pyversions| |Conda version| 2 | |Code Style Black| 3 | 4 | 5 | .. image:: https://raw.githubusercontent.com/daviskirk/climatecontrol/logo/climatecontrol-text.svg?sanitize=true 6 | 7 | 8 | CLIMATECONTROL controls your applications settings and configuration 9 | environment. It is a Python library for loading app configurations from files 10 | and/or namespaced environment variables. 11 | 12 | Features 13 | ======== 14 | 15 | * Separation of settings and code 16 | * Loading from files (`.yaml`, `.json`, `.toml`) 17 | * Loading multiple files using glob syntax 18 | * Loading from environment variables, including loading of nested values 19 | * Freely reference nested configurations via files or environment variables 20 | * CLI integration 21 | * Validation using the Validation library of your choice 22 | * Logging configuration integration 23 | * Testing integration 24 | 25 | 26 | Install 27 | ======= 28 | 29 | :: 30 | 31 | pip install climatecontrol 32 | 33 | 34 | 35 | Usage 36 | ===== 37 | 38 | Set some environment variables in your shell 39 | 40 | .. code:: sh 41 | 42 | export CLIMATECONTROL_VALUE1=test1 43 | export CLIMATECONTROL_VALUE2=test2 44 | 45 | Then use them in your python modules: 46 | 47 | .. 
code:: python 48 | 49 | from climatecontrol import climate 50 | print(climate.settings) 51 | 52 | { 53 | 'value1': 'test1', 54 | 'value2': 'test2' 55 | } 56 | 57 | In case you want to update your settings or your environment variables have 58 | changed and you want to reload them, the `reload` method will reload your 59 | settings: 60 | 61 | .. code:: python 62 | 63 | import os 64 | os.environ['CLIMATECONTROL_VALUE3'] = 'new_env_data' 65 | climate.reload() 66 | print(climate.settings) 67 | 68 | { 69 | 'value1': 'test1', 70 | 'value2': 'test2', 71 | 'value3': 'new_env_data' 72 | } 73 | 74 | 75 | Now you've noticed that you want more complex configurations and need nested 76 | settings. For this situation, we can delimit sections using a double underscore: 77 | 78 | .. code:: sh 79 | 80 | export CLIMATECONTROL_SECTION1__VALUE1=test1 81 | export CLIMATECONTROL_SECTION2__VALUE2=test2 82 | export CLIMATECONTROL_SECTION2__VALUE3=test3 83 | export CLIMATECONTROL_SECTION2__SUB_SECTION__VALUE4=test4 84 | 85 | .. code:: python 86 | 87 | from climatecontrol import climate 88 | print(climate.settings) 89 | 90 | { 91 | 'section1': { 92 | 'value1': 'test1' 93 | }, 94 | 'section2': { 95 | 'value2': 'test2', 96 | 'value3': 'test3', 97 | 'sub_section': { 98 | 'value4': 'test4' 99 | } 100 | } 101 | } 102 | 103 | 104 | Settings file support 105 | --------------------- 106 | 107 | If you don't want to use an environment variable for every single setting and 108 | want to put your settings in a single file instead, you can do this as well. 109 | Settings files can be yaml files (`.yml`/`.yaml`), json files (`.json`) or toml_ files (`.toml`). 110 | 111 | .. code-block:: sh 112 | 113 | export CLIMATECONTROL_SETTINGS_FILE=./my_settings_file.yml 114 | 115 | 116 | The file could look like this: 117 | 118 | .. code-block:: yaml 119 | 120 | # ./climatecontrol_settings.yaml 121 | section1: 122 | subsection1: test1 123 | 124 | section2: 125 | subsection2: test2 126 | subsection3: test3 127 | 128 | 129 | or in toml form: 130 | 131 | .. code-block:: toml 132 | 133 | # ./climatecontrol_settings.toml 134 | [section1] 135 | subsection1 = "test1" 136 | 137 | [section2] 138 | subsection2 = "test2" 139 | subsection3 = "test3" 140 | 141 | 142 | In the following documentation examples, yaml files will be used, but all 143 | examples will work with the other file formats as well. 144 | 145 | See the `climatecontrol.core.Climate.inferred_settings_files` docstring 146 | for further examples of how settings files are loaded and how they can be named. 147 | Also note that you can set your own settings files explicitly either by 148 | setting an environment variable: 149 | 150 | .. code-block:: sh 151 | 152 | export CLIMATECONTROL_SETTINGS_FILE="mysettings.yaml, mysettings.toml, override.yml" 153 | 154 | or by adding them in code: 155 | 156 | .. code-block:: python 157 | 158 | climate.settings_files.extend(["mysettings.yaml", "mysettings.toml", "override.yml"]) 159 | 160 | 161 | Advanced Features 162 | ----------------- 163 | 164 | Setting variables from values saved in files 165 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 166 | 167 | Sometimes we don't want to save values in plain text in environment files or in 168 | the settings file itself. Instead we have a file that contains the value of the 169 | setting we want. A good example of this behaviour is docker secrets_, which 170 | store secrets in temporary files. 
171 | 172 | To read a variable from a file, simply add a `"_from_file"` suffix to the variable 173 | name and set its value to the path of the file that contains the value. 174 | 175 | Using a settings file with the contents (in this case yaml): 176 | 177 | .. code-block:: yaml 178 | 179 | section1: 180 | subsection1_from_file: /home/myuser/supersecret.txt 181 | 182 | or using an environment variable: 183 | 184 | .. code-block:: sh 185 | 186 | export CLIMATECONTROL_SECTION1_SUBSECTION1_FROM_FILE="/home/myuser/supersecret.txt" 187 | 188 | will both write the content of the file at `"/home/myuser/supersecret.txt"` 189 | into the variable `section1 -> subsection1`. 190 | 191 | 192 | Setting variables from values saved in specific environment variables 193 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 194 | 195 | Similarly, to read a value from an environment variable, add a `"_from_env"` suffix to 196 | the variable name. For example if we wanted to obtain a value from the variable 197 | `SPECIFIC_ENV_VAR`: 198 | 199 | .. code-block:: sh 200 | 201 | export SPECIFIC_ENV_VAR="some value" 202 | 203 | Using a settings file with the contents (in this case yaml): 204 | 205 | .. code-block:: yaml 206 | 207 | section1: 208 | subsection1_from_env: SPECIFIC_ENV_VAR 209 | 210 | or using an environment variable: 211 | 212 | .. code-block:: sh 213 | 214 | export CLIMATECONTROL_SECTION1_SUBSECTION1_FROM_ENV="SPECIFIC_ENV_VAR" 215 | 216 | will both write "some value" into the variable `section1 -> subsection1`. 217 | 218 | Setting variables from serialized content 219 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Values can also be set directly from serialized content by adding a `"_from_json_content"`, `"_from_toml_content"` or `"_from_yaml_content"` suffix to the variable name: 220 | 221 | .. code-block:: yaml 222 | 223 | section1_from_json_content: '{"subsection1": "test", "subsection2": 2}' 224 | section2_from_toml_content: 'subsection1 = "test"\nsubsection2 = 2\n' 225 | section3_from_yaml_content: 'subsection1: test\nsubsection2: 2\n' 226 | 227 | 228 | The equivalent environment variables are also handled correctly: 229 | 230 | .. code-block:: sh 231 | 232 | CLIMATECONTROL_SECTION1_FROM_JSON_CONTENT='{"subsection1": "test", "subsection2": 2}' 233 | CLIMATECONTROL_SECTION2_FROM_TOML_CONTENT='subsection1 = "test"\nsubsection2 = 2\n' 234 | CLIMATECONTROL_SECTION3_FROM_YAML_CONTENT='subsection1: test\nsubsection2: 2\n' 235 | 236 | 237 | Nested settings files 238 | ^^^^^^^^^^^^^^^^^^^^^ 239 | 240 | In addition, file variables can also target other settings files directly. To 241 | do this, just make sure the target file has an extension supported by 242 | climatecontrol. A simple example is illustrated here. Given a settings file: 243 | 244 | .. code-block:: yaml 245 | 246 | value1: "spam" 247 | section1_from_file: /home/myuser/nestedfile.yaml 248 | 249 | 250 | where the content of `/home/myuser/nestedfile.yaml` is: 251 | 252 | .. code-block:: yaml 253 | 254 | value2: "cheese" 255 | subsection: 256 | value3: "parrot" 257 | 258 | which would result in a settings structure: 259 | 260 | .. code-block:: python 261 | 262 | { 263 | "value1": "spam", 264 | "section1": { 265 | "value2": "cheese", 266 | "subsection": { 267 | "value3": "parrot" 268 | } 269 | } 270 | } 271 | 272 | You can also expand the settings at the root of the document by using only 273 | "_from_file" as the key: 274 | 275 | .. code-block:: yaml 276 | 277 | value1: "spam" 278 | _from_file: /home/myuser/nestedfile.yaml 279 | 280 | .. 
code-block:: python 281 | 282 | { 283 | "value1": "spam", 284 | "value2": "cheese", 285 | "subsection": { 286 | "value3": "parrot" 287 | } 288 | } 289 | 290 | 291 | Extensions 292 | ---------- 293 | 294 | While the default `climate` object is great for most uses, perhaps you already 295 | have a settings object style that you like or use a specific library for 296 | validation. In these cases, CLIMATECONTROL can be extended to use these 297 | libraries. 298 | 299 | Dataclasses 300 | ^^^^^^^^^^^ 301 | 302 | >>> from climatecontrol.ext.dataclasses import Climate 303 | >>> from dataclasses import dataclass, field 304 | >>> 305 | >>> @dataclass 306 | ... class SettingsSubSchema: 307 | ... d: int = 4 308 | ... 309 | >>> @dataclass 310 | ... class SettingsSchema: 311 | ... a: str = 'test' 312 | ... b: bool = False 313 | ... c: SettingsSubSchema = field(default_factory=SettingsSubSchema) 314 | ... 315 | >>> climate = Climate(dataclass_cls=SettingsSchema) 316 | >>> # defaults are initialized automatically: 317 | >>> climate.settings.a 318 | 'test' 319 | >>> climate.settings.c.d 320 | 4 321 | >>> # Types are checked if given 322 | >>> climate.update({'c': {'d': 'boom!'}}) 323 | Traceback (most recent call last): 324 | ... 325 | dacite.exceptions.WrongTypeError: wrong type for field "c.d" - should be "int" instead of "str" 326 | 327 | 328 | Pydantic 329 | ^^^^^^^^ 330 | 331 | Pydantic is a great data validation library 332 | (https://github.com/samuelcolvin/pydantic), and climatecontrol provides a 333 | simple extension to use pydantic models directly (the typing functionality mentioned 334 | above works here as well). 335 | 336 | Supports both Pydantic v1 (>=1.7.4) and v2 (>=2.0). 337 | 338 | >>> from climatecontrol.ext.pydantic import Climate 339 | >>> from pydantic import BaseModel 340 | >>> class SettingsSubSchema(BaseModel): 341 | ... d: int = 4 342 | ... 343 | >>> class SettingsSchema(BaseModel): 344 | ... a: str = 'test' 345 | ... b: bool = False 346 | ... c: SettingsSubSchema = SettingsSubSchema() 347 | ... 348 | >>> climate = Climate(model=SettingsSchema) 349 | >>> # defaults are initialized automatically: 350 | >>> climate.settings.a 351 | 'test' 352 | >>> climate.settings.c.d 353 | 4 354 | >>> # Types are checked if given 355 | >>> climate.update({'c': {'d': 'boom!'}}) 356 | Traceback (most recent call last): 357 | ... 358 | pydantic.error_wrappers.ValidationError: 1 validation error for SettingsSchema 359 | c -> d 360 | value is not a valid integer (type=type_error.integer) 361 | 362 | 363 | Integrations 364 | ------------ 365 | 366 | Command line support using click 367 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 368 | 369 | The click_ library is a great tool for creating command line applications. If 370 | you don't want to have to use an environment variable to set your configuration file, 371 | write your command line application like this: 372 | 373 | .. code-block:: python 374 | 375 | import click 376 | from climatecontrol import climate 377 | 378 | @click.command() 379 | @climate.click_settings_file_option() 380 | def cli(): 381 | print(climate.settings) 382 | 383 | Save it to a file like "cli.py" and then call it after installing click: 384 | 385 | .. code-block:: sh 386 | 387 | pip install click 388 | python cli.py --settings ./my_settings_file.toml 389 | 390 | without needing to set any env vars. 391 | 392 | Multiple files are supported. They will be automatically merged recursively, 393 | with the last file overwriting any overlapping keys of the first file. 394 | 395 | .. 
code-block:: sh 395 | 396 | pip install click 397 | python cli.py --settings ./my_settings_file.toml --settings ./my_settings_file.yaml 398 | 399 | 400 | Logging 401 | ^^^^^^^ 402 | 403 | If you have a "logging" section in your settings files, you can configure 404 | python standard library logging using that section directly: 405 | 406 | .. code:: yaml 407 | 408 | logging: 409 | formatters: 410 | default: 411 | format: "%(levelname)s > %(message)s" 412 | root: 413 | level: DEBUG 414 | 415 | 416 | .. code:: python 417 | 418 | import logging 419 | from climatecontrol import climate 420 | 421 | climate.setup_logging() 422 | logging.debug('test') 423 | # outputs: DEBUG > test 424 | 425 | 426 | Testing 427 | ------- 428 | 429 | When testing your application, different behaviours often depend on settings 430 | taking on different values. Assuming that you are using a single `Climate` 431 | object across multiple functions or modules, handling these settings changes 432 | in tests can be tricky. 433 | 434 | The climate object provides a simple method for modifying your settings 435 | temporarily: 436 | 437 | .. code-block:: python 438 | 439 | climate.update({'a': 1}) 440 | # Enter a temporary changes context block: 441 | with climate.temporary_changes(): 442 | climate.update({'a': 2}) 443 | # Inside the context, the settings can be modified and used as you choose 444 | print(climate.settings['a']) # outputs: 2 445 | # After the context exits, the settings are rolled back to their previous state 446 | print(climate.settings['a']) # outputs: 1 447 | 448 | 449 | Contributing 450 | ============ 451 | 452 | See: `CONTRIBUTING.md <./CONTRIBUTING.md>`__ 453 | 454 | 455 | .. |Build Status| image:: https://img.shields.io/github/workflow/status/daviskirk/climatecontrol/ci?style=flat-square 456 | :target: https://github.com/daviskirk/climatecontrol 457 | .. |Coverage Status| image:: https://img.shields.io/codecov/c/github/daviskirk/climatecontrol/master?style=flat-square 458 | :target: https://codecov.io/gh/daviskirk/climatecontrol 459 | .. |PyPI version| image:: https://img.shields.io/pypi/v/climatecontrol?style=flat-square 460 | :target: https://pypi.python.org/pypi/climatecontrol/ 461 | .. |PyPI license| image:: https://img.shields.io/pypi/l/climatecontrol?style=flat-square 462 | :target: https://pypi.python.org/pypi/climatecontrol/ 463 | .. |PyPI pyversions| image:: https://img.shields.io/pypi/pyversions/climatecontrol?style=flat-square 464 | :target: https://pypi.python.org/pypi/climatecontrol/ 465 | .. |Conda version| image:: https://img.shields.io/conda/vn/conda-forge/climatecontrol?style=flat-square 466 | :target: https://anaconda.org/conda-forge/climatecontrol 467 | .. |Code Style Black| image:: https://img.shields.io/badge/code%20style-black-000000.svg?style=flat-square 468 | :target: https://github.com/psf/black 469 | .. _click: http://click.pocoo.org/ 470 | .. _toml: https://github.com/toml-lang/toml 471 | .. 
_secrets: https://docs.docker.com/engine/swarm/secrets 472 | -------------------------------------------------------------------------------- /climatecontrol/core.py: -------------------------------------------------------------------------------- 1 | """Climate parser.""" 2 | 3 | import logging 4 | from contextlib import contextmanager 5 | from copy import deepcopy 6 | from itertools import chain 7 | from pathlib import Path 8 | from pprint import pformat 9 | from typing import ( 10 | Any, 11 | Callable, 12 | Iterable, 13 | Iterator, 14 | List, 15 | Mapping, 16 | MutableMapping, 17 | MutableSequence, 18 | Optional, 19 | Sequence, 20 | Tuple, 21 | TypeVar, 22 | Union, 23 | ) 24 | 25 | import wrapt 26 | 27 | from climatecontrol.constants import REMOVED 28 | from climatecontrol.env_parser import EnvParser 29 | from climatecontrol.file_loaders import FileLoader, iter_load 30 | from climatecontrol.fragment import Fragment, FragmentPath 31 | from climatecontrol.logtools import DEFAULT_LOG_SETTINGS, logging_config 32 | from climatecontrol.processors import ( 33 | replace_from_content_vars, 34 | replace_from_env_vars, 35 | replace_from_file_vars, 36 | ) 37 | from climatecontrol.utils import merge_nested 38 | 39 | try: 40 | import click 41 | except ImportError: 42 | click = None # type: ignore 43 | 44 | 45 | logger = logging.getLogger(__name__) 46 | T = TypeVar("T", bound=wrapt.ObjectProxy) 47 | 48 | 49 | class ObjectProxy(wrapt.ObjectProxy): 50 | """Simple object proxy with added representation of wrapped object.""" 51 | 52 | def __repr__(self) -> str: 53 | return repr(self.__wrapped__) 54 | 55 | 56 | class SettingsItem(ObjectProxy): 57 | """Object proxy for representing a nested settings item. 58 | 59 | An object proxy acts like the underlying but adds functionality on top. 60 | In this case the SettingsItem object ensures immutability of the object as 61 | changing a settings object can have unexpected behaviour as the underlying 62 | :class:`Climate` data is not changed and updates the same way. 63 | 64 | The settings item object ensures that any "nested" objects are also 65 | represented as :class:`SettingsItem` object. 
66 | 67 | Examples: 68 | 69 | >>> climate = Climate() 70 | >>> s = SettingsItem({'a': 5, 'b': {'c': 6}}, climate, FragmentPath([])) 71 | >>> s 72 | {'a': 5, 'b': {'c': 6}} 73 | >>> s.a 74 | 5 75 | >>> s.b 76 | {'c': 6} 77 | >>> type(s.b) 78 | 79 | 80 | """ 81 | 82 | def __init__(self, wrapped, climate: "Climate", path: FragmentPath) -> None: 83 | super().__init__(wrapped) 84 | self._self_climate = climate 85 | self._self_path = path 86 | 87 | def __repr__(self) -> str: 88 | self._self_climate.ensure_initialized() 89 | return super().__repr__() 90 | 91 | def __getattr__(self, key): 92 | self._self_climate.ensure_initialized() 93 | try: 94 | result = getattr(self.__wrapped__, key) 95 | except AttributeError as e: 96 | try: 97 | result = self.__wrapped__[key] 98 | except (TypeError, KeyError): 99 | raise e 100 | if self._self_is_mutable(result): 101 | return type(self)( 102 | result, 103 | self._self_climate, 104 | type(self._self_path)(list(self._self_path) + [key]), 105 | ) 106 | return result 107 | 108 | def __deepcopy__(self: T, memo: dict) -> T: 109 | return type(self)( 110 | deepcopy(self.__wrapped__, memo), self._self_climate, self._self_path 111 | ) 112 | 113 | def __setattr__(self, key: str, value) -> None: 114 | is_proxy_key = hasattr(key, "startswith") and key.startswith("_self_") 115 | if not is_proxy_key: 116 | raise TypeError(f"{type(self)} does not support attribute assignment") 117 | super().__setattr__(key, value) 118 | 119 | def __delattr__(self, key: str) -> None: 120 | is_proxy_key = hasattr(key, "startswith") and key.startswith("_self_") 121 | if not is_proxy_key: 122 | raise TypeError(f"{type(self)} does not support attribute deletion") 123 | super().__delattr__(key) 124 | 125 | def __getitem__(self, key): 126 | self._self_climate.ensure_initialized() 127 | result = self.__wrapped__.__getitem__(key) 128 | if self._self_is_mutable(result): 129 | return type(self)( 130 | result, 131 | self._self_climate, 132 | type(self._self_path)(list(self._self_path) + [key]), 133 | ) 134 | return result 135 | 136 | def __setitem__(self, key, value) -> None: 137 | raise TypeError(f"{type(self)} does not support item assignment") 138 | 139 | def __delitem__(self, key) -> None: 140 | raise TypeError(f"{type(self)} does not support item deletion") 141 | 142 | @classmethod 143 | def _self_is_mutable(cls, value: Any) -> bool: 144 | return isinstance(value, (MutableMapping, MutableSequence)) 145 | 146 | 147 | class Climate: 148 | """A Climate instance allows settings to be loaded from a settings file or environment variables. 149 | 150 | Attributes: 151 | settings_files: If set, a sequence of paths to settings files (json, yaml or toml 152 | format) from which all settings are loaded. The files are 153 | loaded one after another with variables set in later files 154 | overwriting values set in previous files. 155 | env_parser: `EnvParser` object handling the parsing of environment variables 156 | parser: If given, defines a custom function to further process the 157 | result of the settings. The function should take a single 158 | nested dictionary argument (the settings map) as an argument 159 | and output a nested dictionary. 160 | 161 | Args: 162 | settings_files: See attribute 163 | parser: See attribute 164 | **env_parser_kwargs: Arguments passed to :class:`EnvParser` constructor. 
165 | 166 | Example: 167 | >>> import os 168 | >>> os.environ['MY_APP_VALUE0'] = 'test0' 169 | >>> os.environ['MY_APP_SECTION1__SUB1'] = 'test1' 170 | >>> os.environ['MY_APP_SECTION2__SUB2'] = 'test2' 171 | >>> os.environ['MY_APP_SECTION2__SUB3'] = 'test3' 172 | >>> climate = Climate(prefix='MY_APP') 173 | >>> dict(climate.settings) 174 | {'value0': 'test0', 'section1': {'sub1': 'test1'}, 'section2': {'sub2': 'test2', 'sub3': 'test3'}} 175 | 176 | See Also: 177 | EnvParser 178 | 179 | """ 180 | 181 | settings_files: List[Union[str, Path]] 182 | _combined_fragment: Fragment 183 | _updates: List 184 | _fragments: List[Fragment] 185 | _data: Any 186 | _initialized: bool 187 | _processors: Tuple[Callable[[Fragment], Iterator[Fragment]], ...] = ( 188 | replace_from_file_vars, 189 | replace_from_env_vars, 190 | replace_from_content_vars, 191 | ) 192 | 193 | def __init__( 194 | self, 195 | settings_files: Union[str, Path, Sequence[Union[str, Path]]] = (), 196 | parser: Optional[Callable[[Mapping], Mapping]] = None, 197 | **env_parser_kwargs: Any, 198 | ) -> None: 199 | """Initialize settings object.""" 200 | self.env_parser = EnvParser(**(env_parser_kwargs or {})) 201 | self.parser = parser 202 | if isinstance(settings_files, (str, Path)): 203 | self.settings_files = [settings_files] 204 | else: 205 | self.settings_files = list(settings_files) 206 | self._updates = [] 207 | self._fragments = [] 208 | self._initialized = False 209 | # We use an object proxy here so that the referene to the object is always the same. 210 | # Note that instead of assigning _data directly, we reinitialize it using self._set_data(new_obj). 211 | self._data = ObjectProxy(None) 212 | self._combined_fragment = Fragment(None) 213 | 214 | def __repr__(self) -> str: 215 | return self.__class__.__qualname__ + "[\n{}\n]".format(pformat(self._data)) 216 | 217 | @property 218 | def parser(self) -> Optional[Callable[[Mapping], Mapping]]: 219 | """Return settings parser function.""" 220 | return self._parse 221 | 222 | @parser.setter 223 | def parser(self, value: Optional[Callable[[Mapping], Mapping]]) -> None: 224 | """Set the settings parser function.""" 225 | self._parse = value 226 | 227 | @property 228 | def settings(self) -> Any: 229 | """Return a settings item proxy for easy access to settings hierarchy.""" 230 | self.ensure_initialized() 231 | return SettingsItem(self._data, self, FragmentPath()) 232 | 233 | @property 234 | def inferred_settings_files(self) -> List[Path]: 235 | """Infer settings files from current directory and parent directories. 236 | 237 | 1. Search upward until a repository root is found (symbolized by a get repository) 238 | 2. Along the directories starting with the project root up until the current directory search for the following files: 239 | * Files matching the pattern: `**settings*` 240 | * Files matching the pattern above but within subdirectories named `**settings*` 241 | * Files matching the pattern above in any recursive subdirectories of the subdirectory mentioned above 242 | 243 | Note that the prefix is lower cased even if it is given as upper or mixed case. 
244 | 245 | Given a filestructure: 246 | 247 | :: 248 | 249 | 250 | |-- myuser/ 251 | |-- unused_climatecontrol_settings.yaml 252 | |-- myrepo/ 253 | |-- .git/ 254 | |-- base-climatecontrol-settings.json 255 | |-- climatecontrol_settings/ 256 | |-- 01.toml 257 | |-- 02.yml 258 | |-- 0/ 259 | |-- settings.yml 260 | |-- 1/ 261 | |-- settings.json 262 | |-- myproject/ 263 | |-- climatecontrol.general.settings.yaml 264 | |-- mysubproject/ 265 | |-- .climatecontrol.settings.yaml 266 | 267 | and assuming the current working directory is `myuser/myproject/mysubproject`, the inferred settings files would be: 268 | 269 | :: 270 | myuser/myrepo/base-climatecontrol-settings.json 271 | myuser/myrepo/climatecontrol_settings/01.toml 272 | myuser/myrepo/climatecontrol_settings/02.yml 273 | myuser/myrepo/climatecontrol_settings/0/settings.yml 274 | myuser/myrepo/climatecontrol_settings/1/settings.json 275 | myuser/myproject/climatecontrol.general.settings.yaml 276 | myuser/mysubproject/.climatecontrol.settings.yaml 277 | 278 | """ 279 | prefix = self.env_parser.prefix.strip(self.env_parser.split_char).lower() 280 | base_pattern = f"*{prefix}*settings" 281 | extensions = [ 282 | ext 283 | for loader in FileLoader.registered_loaders 284 | for ext in loader.valid_file_extensions 285 | ] 286 | 287 | def find_settings_files(path: Path, glob_pattern: str, recursive=False): 288 | glob = path.rglob if recursive else path.glob 289 | filepaths = [] 290 | for ext in extensions: 291 | for filepath in glob(f"{glob_pattern}{ext}"): 292 | if filepath.is_file(): 293 | filepaths.append(filepath) 294 | return sorted(filepaths) 295 | 296 | # Find all directories between current directory and project root 297 | search_directories: List[Path] = [] 298 | project_root_candidates = [ 299 | ".git", 300 | ".hg", 301 | "setup.py", 302 | "requirements.txt", 303 | "environment.yml", 304 | "environment.yaml", 305 | "pyproject.toml", 306 | ] 307 | current_path: Path = Path(".") 308 | while True: 309 | search_directories.append(current_path) 310 | new_current_path = current_path / ".." 311 | if ( 312 | any( 313 | (current_path / candidate).exists() 314 | for candidate in project_root_candidates 315 | ) 316 | or not new_current_path.is_dir() 317 | or new_current_path.resolve() == current_path.resolve() 318 | ): 319 | break 320 | current_path = new_current_path 321 | 322 | # Iterate over all directories and find files 323 | filepaths: List[Path] = [] 324 | for directory in reversed(search_directories): 325 | filepaths.extend(find_settings_files(directory, base_pattern)) 326 | for sub_dir in directory.glob(base_pattern): 327 | if not sub_dir.is_dir(): 328 | continue 329 | # Use all files with valid file extensions if already in settings directory. 
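# Note (added comment): recursive=True makes find_settings_files use Path.rglob instead of Path.glob, so nested subdirectories of the settings directory are searched as well.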
330 | filepaths.extend(find_settings_files(sub_dir, "*", recursive=True)) 331 | 332 | return filepaths 333 | 334 | @property 335 | def update_log(self) -> str: 336 | """Log of all each loaded settings variable.""" 337 | 338 | def iter_fragment_lines(fragment: Fragment) -> Iterator[str]: 339 | for leaf in fragment.iter_leaves(): 340 | action = "removed" if leaf.value == REMOVED else "loaded" 341 | yield action + " " + ".".join( 342 | str(p) for p in leaf.path 343 | ) + " from " + str(leaf.source) 344 | 345 | lines = chain.from_iterable( 346 | iter_fragment_lines(fragment) for fragment in self._fragments 347 | ) 348 | result = "\n".join(lines) 349 | return result 350 | 351 | def clear(self) -> None: 352 | """Remove all data and reset to initial state.""" 353 | self._updates.clear() 354 | self._fragments.clear() 355 | self._initialized = False # next access should reload all fragments 356 | 357 | def ensure_initialized(self): 358 | """Ensure that object is initialized and reload if it is not.""" 359 | if not self._initialized: 360 | self.reload() 361 | 362 | def reload(self) -> None: 363 | """Reload data from all sources. 364 | 365 | Updates that were applied manually (through code) are not discarded. Use 366 | :method:`clear` for that. 367 | """ 368 | parsed, combined, fragments = self._stateless_reload(self._updates) 369 | self._set_state(parsed, combined, fragments, self._updates) 370 | 371 | def update( 372 | self, 373 | update_data: Optional[Mapping] = None, 374 | path: Optional[Union[str, int, Sequence]] = None, 375 | ) -> None: 376 | """Update settings using a patch dictionary. 377 | 378 | Args: 379 | update_data: Updates for settings. This is equivilant to `dict.update` except 380 | that the update is recursive for nested dictionaries. 381 | 382 | Example: 383 | >>> import os 384 | >>> os.environ['CLIMATECONTROL_VALUE'] = 'test' 385 | >>> climate = Climate() 386 | >>> dict(climate.settings) 387 | {'value': 'test'} 388 | >>> 389 | >>> # now update the settings 390 | >>> climate.update({'new_value': 'new'}) 391 | >>> climate.settings.value 392 | 'test' 393 | >>> climate.settings.new_value 394 | 'new' 395 | 396 | Alternatively a path can be specified that will be expanded: 397 | 398 | >>> climate.update('test', 'level_1.level_2.0.inlist') 399 | >>> climate.settings.level_1.level_2[0].inlist 400 | 'test' 401 | 402 | 403 | """ 404 | if path is not None: 405 | update_data = FragmentPath.from_spec(path).expand(update_data) 406 | if not self._initialized: 407 | new_updates = ( 408 | self._updates + [update_data] if update_data else self._updates 409 | ) 410 | parsed, combined, fragments = self._stateless_reload(new_updates) 411 | self._set_state(parsed, combined, fragments, new_updates) 412 | return 413 | if not update_data: 414 | return 415 | # we can start directly from the previously consolidated fragment 416 | base_fragments: List[Fragment] = [self._combined_fragment] 417 | new_updates = [update_data] 418 | update_fragments = list(self._iter_update_fragments(new_updates)) 419 | combined = self._combine_fragments(chain(base_fragments, update_fragments)) 420 | expanded = combined.expand_value_with_path() 421 | clean_removed_items(expanded) 422 | parsed = self.parse(expanded) 423 | 424 | fragments = self._fragments + update_fragments 425 | updates = self._updates + new_updates 426 | 427 | self._set_state(parsed, combined, fragments, updates) 428 | 429 | def parse(self, data: Any) -> Any: 430 | """Parse data into settings. 
431 | 432 | Args: 433 | data: Raw mapping to be parsed 434 | 435 | Returns: 436 | Parsed data that has run through all preparsers and the `Climate`. 437 | 438 | """ 439 | if self._parse: 440 | return self._parse(data) 441 | else: 442 | return data 443 | 444 | def setup_logging(self, logging_section: str = "logging") -> None: 445 | """Initialize logging. 446 | 447 | Uses the ``'logging'`` section from the global ``SETTINGS`` object if 448 | available. Otherwise uses sane defaults provided by the 449 | ``climatecontrol`` package. 450 | 451 | """ 452 | logging_settings = DEFAULT_LOG_SETTINGS 453 | try: 454 | logging_settings_update = getattr(self.settings, logging_section) 455 | except (KeyError, TypeError, AttributeError): 456 | logging_settings_update = None 457 | if logging_settings_update: 458 | logging_settings = merge_nested(logging_settings, logging_settings_update) 459 | logging_config.dictConfig(logging_settings) 460 | 461 | def click_settings_file_option(self, **kw) -> Callable[..., Any]: 462 | """See :func:`cli_utils.click_settings_file_option`.""" 463 | from climatecontrol import cli_utils 464 | 465 | return cli_utils.click_settings_file_option(self, **kw) 466 | 467 | @contextmanager 468 | def temporary_changes(self): 469 | """Open a context where any changes to the settings are rolled back on context exit. 470 | 471 | This context manager can be used for testing or to temporarily change 472 | settings. 473 | 474 | Example: 475 | >>> from climatecontrol.core import Climate 476 | >>> climate = Climate() 477 | >>> climate.update({'a': 1}) 478 | >>> with climate.temporary_changes(): 479 | ... climate.update({'a': 2}) 480 | ... assert climate.settings['a'] == 2 481 | >>> assert climate.settings['a'] == 1 482 | 483 | """ 484 | archived_data = deepcopy(self._data.__wrapped__) 485 | archived_settings = { 486 | k: deepcopy(getattr(self, k)) 487 | for k in [ 488 | "settings_files", 489 | "_updates", 490 | "_fragments", 491 | "_combined_fragment", 492 | ] 493 | } 494 | yield self 495 | 496 | # reinstate all saved data after context block is finished 497 | self._set_data(archived_data) 498 | for k, v in archived_settings.items(): 499 | setattr(self, k, v) 500 | 501 | def _set_state( 502 | self, parsed: Any, combined: Fragment, fragments: List[Fragment], updates: list 503 | ): 504 | """Set all relevant state fields related to loading of settings on object.""" 505 | self._fragments = fragments 506 | self._combined_fragment = combined 507 | self._set_data(parsed) 508 | self._updates = updates 509 | self._initialized = True 510 | 511 | def _set_data(self, value: Any) -> None: 512 | self._data.__init__(value) 513 | 514 | def _stateless_reload(self, updates: list) -> Tuple[List[Fragment], Fragment, Any]: 515 | """Calculate result of reload but do not use any object state. 516 | 517 | Obtain updates from input instead of from :attrib:`_updates` and return 518 | the results as output instead of storing them in state. 519 | 520 | Args: 521 | updates: List of updates. 522 | Returns: 523 | 524 | Tuple conisting of a list of: The parsed result object, the combined 525 | final fragment, the list of fragments that were used to reach this 526 | result. 
527 | """ 528 | base_fragments = self._iter_base_fragments() 529 | update_fragments = self._iter_update_fragments(updates) 530 | fragments = list(chain(base_fragments, update_fragments)) 531 | combined = self._combine_fragments(fragments) 532 | expanded = combined.expand_value_with_path() 533 | clean_removed_items(expanded) 534 | parsed = self.parse(expanded) 535 | return parsed, combined, fragments 536 | 537 | def _process_fragment(self, fragment: Fragment) -> Iterator[Fragment]: 538 | """Preprocess a settings fragment and return the new version.""" 539 | for process in self._processors: 540 | for new_fragment in process(fragment): 541 | yield new_fragment 542 | # recursively process new fragments as well 543 | yield from self._process_fragment(new_fragment) 544 | 545 | def _iter_process_fragments( 546 | self, fragments: Iterable[Fragment] 547 | ) -> Iterator[Fragment]: 548 | for fragment in fragments: 549 | yield fragment 550 | yield from self._process_fragment(fragment) 551 | 552 | def _iter_update_fragments(self, updates: Sequence[Mapping] = ()): 553 | fragments = ( 554 | Fragment(value=update_data, source="external") 555 | for update_data in updates 556 | if update_data 557 | ) 558 | yield from self._iter_process_fragments(fragments) 559 | 560 | def _iter_base_fragments(self) -> Iterator[Fragment]: 561 | """Iterate through all relevant fragments.""" 562 | fragments = chain(self._iter_load_files(), self.env_parser.iter_load()) 563 | yield from self._iter_process_fragments(fragments) 564 | 565 | def _combine_fragments(self, fragments: Iterable[Fragment]) -> Fragment: 566 | """Combine the fragments into one final fragment.""" 567 | combined_fragment: Optional[Fragment] = None 568 | for fragment in fragments: 569 | if combined_fragment is None: 570 | combined_fragment = fragment 571 | else: 572 | combined_fragment = combined_fragment.merge(fragment) 573 | 574 | if not combined_fragment: 575 | combined_fragment = Fragment({}) 576 | return combined_fragment 577 | 578 | def _iter_load_files(self) -> Iterator[Fragment]: 579 | for inferred_entry in self.inferred_settings_files: 580 | yield from iter_load(inferred_entry) 581 | 582 | for entry in self.settings_files: 583 | yield from iter_load(entry) 584 | 585 | 586 | def clean_removed_items(obj): 587 | """Remove all keys that contain a removed key indicated by a :data:``REMOVED`` object.""" 588 | items: Iterable[Tuple[Any, Any]] 589 | if isinstance(obj, MutableMapping): 590 | items = obj.items() 591 | elif isinstance(obj, MutableSequence): 592 | items = enumerate(obj) 593 | else: 594 | return 595 | 596 | keys_to_remove = [] 597 | for key, value in items: 598 | if value == REMOVED: 599 | keys_to_remove.append(key) 600 | else: 601 | clean_removed_items(value) 602 | 603 | for key in keys_to_remove: 604 | del obj[key] 605 | -------------------------------------------------------------------------------- /tests/test_settings.py: -------------------------------------------------------------------------------- 1 | """Test settings.""" 2 | 3 | import json 4 | import os 5 | import sys 6 | from collections.abc import Mapping 7 | from pathlib import Path 8 | from unittest.mock import MagicMock 9 | 10 | import click 11 | import pytest 12 | from click.testing import CliRunner 13 | 14 | from climatecontrol import cli_utils, core # noqa: E402 15 | from climatecontrol.exceptions import NoCompatibleLoaderFoundError 16 | from climatecontrol.fragment import Fragment 17 | 18 | 19 | def test_settings_empty(mock_empty_os_environ): 20 | """Check that initializing 
empty settings works correctly.""" 21 | climate = core.Climate() 22 | assert isinstance(climate.settings, Mapping) 23 | assert dict(climate.settings) == {} 24 | assert repr(climate) # check that __repr__ works 25 | assert str(climate) # check that __repr__ works 26 | assert str(climate.settings) # check that __repr__ works 27 | assert len(climate.settings) == 0 # length of settings map 28 | 29 | 30 | def test_settings(mock_os_environ): 31 | """Check that initializing settings works correctly.""" 32 | climate = core.Climate(prefix="TEST_STUFF") 33 | assert isinstance(climate.settings, Mapping) 34 | expected = {"testgroup": {"testvar": 7, "test_var": 6}, "testgroup_test_var": 9} 35 | assert dict(climate.settings) == expected 36 | 37 | 38 | def test_settings_parse(mock_os_environ): 39 | """Check that parsing settings runs through without errors.""" 40 | expected = {"bla": "test"} 41 | parser = MagicMock() 42 | parser.return_value = expected 43 | climate = core.Climate(prefix="TEST_STUFF", parser=parser) 44 | assert ( 45 | parser.call_count == 0 46 | ), "Before accessing settings, the parser should not have been called" 47 | assert isinstance(climate.settings, Mapping) 48 | assert dict(climate.settings) == expected 49 | assert ( 50 | parser.call_count == 1 51 | ), "After accessing settings, the parser should have been called" 52 | 53 | 54 | @pytest.mark.parametrize("original", [False, True]) 55 | @pytest.mark.parametrize("file_exists", [False, True]) 56 | def test_parse_from_file_vars(original, file_exists, mock_os_environ, tmpdir): 57 | """Check that the "from_file" extension works as expected. 58 | 59 | Adding the "from_file" suffix should result in the variable being read from 60 | the file and not directly. 61 | 62 | """ 63 | climate = core.Climate() 64 | filepath = tmpdir.join("testvarfile") 65 | filename = str(filepath) 66 | if file_exists: 67 | with open(filename, "w") as f: 68 | f.write("apassword\n") 69 | update_dict = {"this_var_from_file": filename} 70 | if original: 71 | update_dict["this_var"] = "the original password" 72 | climate.update(update_dict) 73 | assert isinstance(climate.settings, Mapping) 74 | actual = dict(climate.settings) 75 | expected = {} 76 | if original: 77 | expected = {"this_var": "the original password"} 78 | if file_exists: 79 | expected = {"this_var": "apassword"} 80 | assert actual == expected 81 | 82 | 83 | def test_parse_from_file_root_var(mock_os_environ, tmpdir): 84 | """Check that the "from_file" extension works as expected when loading from root. 85 | 86 | Loading a key named "_from_file" should load the variables onto the same 87 | level as the "_from_file". 
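    A minimal sketch of both forms of the extension, using made-up key names and
    file paths (the referenced file contents are shown as comments):

        from climatecontrol.core import Climate

        # /tmp/secret.txt is assumed to contain the single line:  apassword
        # /tmp/extra.yaml is assumed to contain the single line:  host: db.local
        climate = Climate()
        climate.update({
            "password_from_file": "/tmp/secret.txt",  # file contents become the value of "password"
            "_from_file": "/tmp/extra.yaml",          # mapping from the file is merged onto this level
        })
        assert climate.settings["password"] == "apassword"
        assert climate.settings["host"] == "db.local"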
88 | """ 89 | climate = core.Climate() 90 | filepath = tmpdir.join("testfile.yaml") 91 | filename = str(filepath) 92 | with open(filename, "w") as f: 93 | f.write("b: 1\n" "c: 2\n") 94 | update_dict = { 95 | "a": "old", 96 | "b": "old", 97 | "_from_file": filename, 98 | "d": "old", 99 | } 100 | climate.update(update_dict) 101 | assert isinstance(climate.settings, Mapping) 102 | actual = dict(climate.settings) 103 | expected = {"a": "old", "b": 1, "c": 2, "d": "old"} 104 | assert actual == expected 105 | 106 | 107 | @pytest.mark.parametrize( 108 | "value, expected", 109 | [ 110 | ("__testdir__/testfile1.yaml", {"b": 1, "c": 2}), 111 | ( 112 | ["wrong.yaml", "__testdir__/testfile1.yaml", "wrong.yaml"], 113 | {"b": 1, "c": 2}, 114 | ), 115 | ( 116 | [ 117 | "wrong.yaml", 118 | "__testdir__/testfile1.yaml", 119 | "__testdir__/testfile2.yaml", 120 | "wrong.yaml", 121 | ], 122 | {"b": 1, "c": 3}, 123 | ), 124 | ("__testdir__/*", {"b": 1, "c": 3}), 125 | ([], {}), 126 | ("__testdir__/wrong.yaml", {}), 127 | ], 128 | ) 129 | def test_parse_from_files_root_var(mock_os_environ, tmpdir, value, expected): 130 | """Check that the "from_file" extension works as expected when loading from root. 131 | 132 | Loading a key named "_from_file" should load the variables onto the same 133 | level as the "_from_file". 134 | """ 135 | climate = core.Climate() 136 | 137 | filepath = tmpdir.join("testfile1.yaml") 138 | with open(str(filepath), "w") as f: 139 | f.write("b: 1\n" "c: 2\n") 140 | 141 | filepath = tmpdir.join("testfile2.yaml") 142 | with open(str(filepath), "w") as f: 143 | f.write("c: 3\n") 144 | 145 | if isinstance(value, list): 146 | value = [item.replace("__testdir__", str(tmpdir)) for item in value] 147 | else: 148 | value = value.replace("__testdir__", str(tmpdir)) 149 | 150 | update_dict = { 151 | "_from_file": value, 152 | } 153 | 154 | climate.update(update_dict) 155 | assert isinstance(climate.settings, Mapping) 156 | actual = dict(climate.settings) 157 | assert actual == expected 158 | 159 | 160 | @pytest.mark.parametrize( 161 | "settings_update, var_content, expected", 162 | [ 163 | ({"test_var_from_env": "MY_VAR"}, "apassword", {"test_var": "apassword"}), 164 | ( 165 | {"a": {"test_var_from_env": "MY_VAR"}}, 166 | '{"b": "apassword"}', 167 | {"a": {"test_var": {"b": "apassword"}}}, 168 | ), 169 | ( 170 | {"a": [0, {"test_var_from_env": "MY_VAR", "test_var2_from_env": "MY_VAR"}]}, 171 | "1", 172 | {"a": [0, {"test_var": 1, "test_var2": 1}]}, 173 | ), 174 | ({"test_var_from_env": "MY_WRONG_VAR", "b": 3}, "never seen", {"b": 3}), 175 | ], 176 | ) 177 | def test_parse_from_env_vars(mock_os_environ, settings_update, var_content, expected): 178 | """Test replacing environment variables in settings.""" 179 | climate = core.Climate() 180 | os.environ["MY_VAR"] = var_content 181 | climate.update(settings_update) 182 | actual = dict(climate.settings) 183 | assert actual == expected 184 | 185 | 186 | @pytest.mark.parametrize( 187 | "settings_files", ["asd;kjhaflkjhasf", ".", "/home/", [".", "asd;kjhaflkjhasf"]] 188 | ) 189 | def test_settings_files_fail(mock_empty_os_environ, settings_files): 190 | """Check that passing invalid settings files really results in errors.""" 191 | climate = core.Climate(prefix="TEST_STUFF", settings_files=settings_files) 192 | with pytest.raises(NoCompatibleLoaderFoundError): 193 | climate.update() 194 | 195 | 196 | @pytest.mark.parametrize( 197 | "file_str, filename, mock_module", 198 | [ 199 | ("---\na: 5", "test.yaml", "climatecontrol.file_loaders.yaml"), 200 | 
("[section]\na = 5", "test.toml", "climatecontrol.file_loaders.tomli"), 201 | ], 202 | ) 203 | def test_file_loader_module_import_fail( 204 | mock_empty_os_environ, monkeypatch, file_str, filename, mock_module, tmpdir 205 | ): 206 | """Check that uninstalled yaml or toml really results in an error.""" 207 | # Check that without mocking everything is file: 208 | path = tmpdir / filename 209 | with open(tmpdir / filename, "w") as f: 210 | f.write(file_str) 211 | 212 | climate = core.Climate(prefix="TEST_STUFF", settings_files=[str(path)]) 213 | climate.update() 214 | # Now fake not having imported yaml 215 | monkeypatch.setattr(mock_module, None) 216 | climate = core.Climate(prefix="TEST_STUFF", settings_files=[str(path)]) 217 | with pytest.raises(ImportError): 218 | climate.update() 219 | 220 | 221 | @pytest.mark.parametrize( 222 | "key,content,expected", 223 | [ 224 | pytest.param( 225 | "root_from_yaml_content", 226 | "a:\n b: 5\n", 227 | {"root": {"a": {"b": 5}}}, 228 | id="yaml content", 229 | ), # no file loader with "a" as valid start 230 | pytest.param( 231 | "root_from_json_content", 232 | '{"a": {"b": 5}}', 233 | {"root": {"a": {"b": 5}}}, 234 | id="json content", 235 | ), # json must be object, seeing "[" assumes a toml file 236 | pytest.param( 237 | "root_from_toml_content", 238 | "[a]\nb=5", 239 | {"root": {"a": {"b": 5}}}, 240 | id="toml content", 241 | ), # toml file has to start with [ or it is not parsable 242 | pytest.param( 243 | "root_from_toml_content", "[a\nb=5", {}, id="invalid toml syntax" 244 | ), # toml file has to start with [ or it is not parsable 245 | pytest.param( 246 | "root_from_json_content", 247 | '{"a": {"b_from_yaml_content": "c: 6"}}', 248 | {"root": {"a": {"b": {"c": 6}}}}, 249 | id="nested from_content keys", 250 | ), # toml file has to start with [ or it is not parsable 251 | ], 252 | ) 253 | def test_from_content(mock_empty_os_environ, key, content, expected): 254 | """Check parsing file content from different file types raises an error on incorrect file content.""" 255 | climate = core.Climate() 256 | climate.update({key: content}) 257 | assert dict(climate.settings) == expected 258 | 259 | 260 | def test_settings_single_file(mock_empty_os_environ, mock_settings_file, tmpdir): 261 | """Check that setting a the "settings_files" option works correctly.""" 262 | climate = core.Climate(prefix="TEST_STUFF", settings_files=mock_settings_file[0]) 263 | assert isinstance(climate.settings, Mapping) 264 | assert dict(climate.settings) == mock_settings_file[1] 265 | 266 | 267 | def test_settings_multiple_files(mock_empty_os_environ, mock_settings_files, tmpdir): 268 | """Check that setting multiple files as "settings_files" option works correctly.""" 269 | climate = core.Climate(prefix="TEST_STUFF", settings_files=mock_settings_files[0]) 270 | assert isinstance(climate.settings, Mapping) 271 | assert dict(climate.settings) == mock_settings_files[1] 272 | 273 | 274 | def test_settings_multiple_files_with_glob( 275 | mock_empty_os_environ, mock_settings_files, tmpdir, file_extension 276 | ): 277 | """Check that setting multiple files as "settings_files" option works correctly.""" 278 | directory, _ = os.path.split(mock_settings_files[0][0]) 279 | glob_path = directory + os.path.sep + "*" + file_extension 280 | climate = core.Climate(prefix="TEST_STUFF", settings_files=glob_path) 281 | assert isinstance(climate.settings, Mapping) 282 | assert dict(climate.settings) == mock_settings_files[1] 283 | 284 | 285 | def 
test_settings_env_file_and_env(mock_env_settings_file, tmpdir): 286 | """Check that a settings file from an env variable works together with other env variables settings. 287 | 288 | In the default case environment vars should override settings file vars. 289 | """ 290 | climate = core.Climate(prefix="TEST_STUFF") 291 | assert isinstance(climate.settings, Mapping) 292 | assert dict(climate.settings) == { 293 | "testgroup": {"testvar": 7, "test_var": 6}, 294 | "othergroup": {"blabla": 555}, 295 | "testgroup_test_var": 9, 296 | } 297 | 298 | 299 | def test_settings_multiple_files_and_env(mock_os_environ, mock_settings_files, tmpdir): 300 | """Check that using multiple settings files together with settings parsed from env variables works. 301 | 302 | Each subsequent settings file should override the last and environment vars 303 | should override any settings file vars. 304 | 305 | Additionally check that the logs are fired correctly and have the correct 306 | result. 307 | 308 | """ 309 | climate = core.Climate(prefix="TEST_STUFF", settings_files=mock_settings_files[0]) 310 | assert isinstance(climate.settings, Mapping) 311 | 312 | assert dict(climate.settings) == { 313 | "testgroup": {"test_var": 6, "testvar": 7, "testvar_inline_1": "foo"}, 314 | "othergroup": {"blabla": 555, "testvar_inline_2": "bar"}, 315 | "testgroup_test_var": 9, 316 | } 317 | 318 | expected_fragments = [ 319 | Fragment( 320 | value={ 321 | "testgroup": { 322 | "testvar": 123, 323 | "testvar_inline_1_from_file": str(tmpdir / "sub2" / "secret.txt"), 324 | }, 325 | "othergroup": { 326 | "blabla": 55, 327 | "testvar_inline_2_from_file": str(tmpdir / "sub2" / "secret.txt"), 328 | }, 329 | }, 330 | source=mock_settings_files[0][0], 331 | path=[], 332 | ), 333 | Fragment( 334 | value=core.REMOVED, 335 | source=mock_settings_files[0][0], 336 | path=["testgroup", "testvar_inline_1_from_file"], 337 | ), 338 | Fragment( 339 | value="foo", 340 | source=mock_settings_files[0][0], 341 | path=["testgroup", "testvar_inline_1"], 342 | ), 343 | Fragment( 344 | value=core.REMOVED, 345 | source=mock_settings_files[0][0], 346 | path=["othergroup", "testvar_inline_2_from_file"], 347 | ), 348 | Fragment( 349 | value="foo", 350 | source=mock_settings_files[0][0], 351 | path=["othergroup", "testvar_inline_2"], 352 | ), 353 | Fragment( 354 | value={"othergroup": {"blabla": 555, "testvar_inline_2": "bar"}}, 355 | source=mock_settings_files[0][1], 356 | ), 357 | Fragment( 358 | value=6, 359 | source="ENV:TEST_STUFF_TESTGROUP__TEST_VAR", 360 | path=["testgroup", "test_var"], 361 | ), 362 | Fragment( 363 | value=7, 364 | source="ENV:TEST_STUFF_TESTGROUP__TESTVAR", 365 | path=["testgroup", "testvar"], 366 | ), 367 | Fragment( 368 | value=9, 369 | source="ENV:TEST_STUFF_TESTGROUP_TEST_VAR", 370 | path=["testgroup_test_var"], 371 | ), 372 | ] 373 | 374 | assert len(climate._fragments) == len(expected_fragments) 375 | if sys.version_info[:2] >= (3, 6): # pragma: nocover 376 | # in python < 3.6 dicts are not ordered so we can't be sure what's up here in python 3.5 377 | assert climate._fragments == expected_fragments 378 | 379 | 380 | @pytest.mark.parametrize( 381 | "ending, content", 382 | [ 383 | (".json", '["this", "that"]\n'), 384 | (".yml", "[this, that]\n"), 385 | pytest.param( 386 | ".toml", 387 | "['this', 'that']", 388 | marks=pytest.mark.xfail(reason="toml literal lists are not supported"), 389 | ), 390 | ], 391 | ) 392 | def test_parse_from_file_list(ending, content, mock_os_environ, tmpdir): 393 | """Check that the "from_file" extension 
works as expected. 394 | 395 | Adding the "from_file" suffix should result in the variable being read from 396 | the file and not directly. In addition, a file ending indicating a 397 | structured format (like json, toml or yaml) should allow the user to read 398 | structures from files (in this case simple lists). 399 | 400 | Note that the toml parser can not (as of 2018-11) interpret top level lists 401 | as such, so specifying a toml file holding only a list will not work as 402 | expected. 403 | 404 | """ 405 | climate = core.Climate() 406 | filepath = tmpdir.join("testvarfile" + ending) 407 | filename = str(filepath) 408 | with open(filename, "w") as f: 409 | f.write(content) 410 | update_dict = {"this_var_from_file": filename} 411 | climate.update(update_dict) 412 | assert isinstance(climate.settings, Mapping) 413 | actual = dict(climate.settings) 414 | expected = {"this_var": ["this", "that"]} 415 | assert actual == expected 416 | 417 | 418 | def test_nested_settings_files(tmpdir): 419 | """Check that parsing of nested "from_file" settings files works as expected. 420 | 421 | In this case a base file references as settings file (nested_1) which in 422 | turn references as second file (nested_2). 423 | 424 | """ 425 | subfolder = tmpdir.mkdir("sub") 426 | p = subfolder.join("settings.json") 427 | nested_1_p = subfolder.join("nested_1.json") 428 | nested_2_p = subfolder.join("nested_2.json") 429 | 430 | nested_2_p.write(json.dumps({"foo": 1, "bar": 2})) 431 | nested_1_p.write(json.dumps({"level_2_from_file": str(nested_2_p)})) 432 | p.write( 433 | json.dumps( 434 | { 435 | "level_1_from_file": str( 436 | nested_1_p 437 | ), # nested_1_p references nested_2_p internally. 438 | "spam": "parrot", 439 | "list": [ 440 | "random", 441 | { 442 | "this_from_file": str( 443 | nested_2_p 444 | ) # dictionaries in lists should be expanded as well. 445 | }, 446 | ], 447 | } 448 | ) 449 | ) 450 | 451 | climate = core.Climate(prefix="TEST_STUFF", settings_files=[str(p)]) 452 | assert dict(climate.settings) == { 453 | "spam": "parrot", 454 | "level_1": {"level_2": {"foo": 1, "bar": 2}}, 455 | "list": ["random", {"this": {"foo": 1, "bar": 2}}], 456 | } 457 | 458 | 459 | def test_multiple_settings_files(tmpdir): 460 | """Check that parsing multiple files on after another works as expected. 461 | 462 | We assume a settings file list with multiple files and expect the files to 463 | be parsed in that order, the latter file overwriting the settings of 464 | earlier files. 
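    Environment variables are loaded after all settings files, so they in turn
    override any file value (see test_settings_env_file_and_env above). A minimal,
    self-contained sketch of the file ordering, using two made-up JSON files:

        from pathlib import Path
        from climatecontrol.core import Climate

        Path("settings1.json").write_text('{"foo": "file 1", "bar": 1}')
        Path("settings2.json").write_text('{"foo": "file 2"}')

        climate = Climate(settings_files=["settings1.json", "settings2.json"])
        assert climate.settings["foo"] == "file 2"  # the later file wins on conflicting keys
        assert climate.settings["bar"] == 1         # keys unique to earlier files are kept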
465 | 466 | """ 467 | subfolder = tmpdir.mkdir("sub") 468 | p1 = subfolder.join("settings1.json") 469 | p1.write(json.dumps({"foo": "test1"})) 470 | 471 | p2 = subfolder.join("settings2.json") 472 | content = subfolder.join("content.txt") 473 | content.write("test2") 474 | p2.write(json.dumps({"foo_from_file": str(content)})) 475 | 476 | climate = core.Climate(prefix="TEST_STUFF", settings_files=[str(p1), str(p2)]) 477 | assert dict(climate.settings) == {"foo": "test2"} 478 | 479 | p3 = subfolder.join("settings3.json") 480 | p3.write(json.dumps({"foo": "test3"})) 481 | 482 | climate = core.Climate( 483 | prefix="TEST_STUFF", settings_files=[str(p1), str(p2), str(p3)] 484 | ) 485 | assert dict(climate.settings) == {"foo": "test3"} 486 | 487 | 488 | def test_inferred_settings_files(tmpdir, mock_empty_os_environ): 489 | """Check that inferred settings are gathered correctly.""" 490 | climate = core.Climate() 491 | 492 | tmp_path = Path(tmpdir) 493 | # write files into fake project directory tree 494 | (tmp_path / "climatecontrol_settings.yaml").write_text("unused = 5\n") 495 | 496 | project_dir = tmp_path / "myproject" 497 | project_dir.mkdir() 498 | (project_dir / ".git").mkdir() 499 | p_file = project_dir / ".climatecontrol_settings.conf" 500 | p_file.write_text('used = "project dir"\nproject_dir = "yes"') 501 | 502 | subproject_dir = project_dir / "subproject" 503 | subproject_dir.mkdir() 504 | sp_file = subproject_dir / "climatecontrol settings.yaml" 505 | sp_file.write_text("subproject: true\nused: subproject dir") 506 | 507 | package_dir = subproject_dir / "package1" 508 | package_dir.mkdir() 509 | (package_dir / "climatecontrol-settings.json").write_text( 510 | '{"unused": "wrong and unused"}' 511 | ) 512 | 513 | project_settings_dir = tmp_path / "myproject" / ".climatecontrol_settings" 514 | project_settings_dir.mkdir() 515 | (project_settings_dir / "climatecontrol settings").write_text("not used either") 516 | (project_settings_dir / "0").mkdir() 517 | (project_settings_dir / "1").mkdir() 518 | p0_file = project_settings_dir / "0" / "settings.yml" 519 | p1_file = project_settings_dir / "1" / "settings.yml" 520 | p0_file.write_text("p_sub: 0\np_sub0: true") 521 | p1_file.write_text("p_sub: 1\np_sub1: true") 522 | 523 | # Switch to the subpoject directory and compute the inferred files as though the program had been started there. 524 | # At the end we have to make sure that we switch back so that other tests don't get messed up. 
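    # Note (inferred from the expected_files assertion below): settings files are
    # discovered by walking from the project root, marked here by the ".git"
    # directory, down to the current working directory. Along that path, files
    # named like "climatecontrol settings" (underscore, hyphen or space variants,
    # with an optional leading dot) are collected, as are files nested inside a
    # ".climatecontrol_settings" directory. Entries closer to the working
    # directory are merged later and therefore override entries closer to the
    # project root.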
525 | os.chdir(subproject_dir) 526 | 527 | # Assert 528 | actual_files = [p.resolve() for p in climate.inferred_settings_files] 529 | expected_files = [p.resolve() for p in [p_file, p0_file, p1_file, sp_file]] 530 | assert actual_files == expected_files 531 | 532 | actual_settings = dict(climate.settings) 533 | assert actual_settings == { 534 | "used": "subproject dir", 535 | "project_dir": "yes", 536 | "subproject": True, 537 | "p_sub": 1, 538 | "p_sub0": True, 539 | "p_sub1": True, 540 | } 541 | 542 | 543 | def mock_parser_fcn(s): 544 | """Return input instead of doing some complex parsing.""" 545 | 546 | 547 | @pytest.mark.parametrize("update", [False, True]) 548 | @pytest.mark.parametrize("envvar", [False, True]) 549 | @pytest.mark.parametrize("clear", [False, True]) 550 | @pytest.mark.parametrize("reload", [False, True]) 551 | def test_update_clear_reload(mock_empty_os_environ, update, envvar, clear, reload): 552 | """Test if updating settings after initialization works.""" 553 | os.environ["THIS_SECTION__MY_VALUE"] = "original" 554 | climate = core.Climate(prefix="this", settings_file_suffix="suffix", parser=None) 555 | original = dict(climate.settings) 556 | assert original == {"section": {"my_value": "original"}} 557 | 558 | expected = original.copy() 559 | if update: 560 | climate.update({"section": {"my_new_value": "value"}}) 561 | if not clear: 562 | expected["section"].update({"my_new_value": "value"}) 563 | if envvar: 564 | os.environ["THIS_SECTION2__NEW_ENV_VALUE"] = "new_env_data" 565 | if reload or clear: 566 | expected.update({"section2": {"new_env_value": "new_env_data"}}) 567 | if clear: 568 | climate.clear() 569 | if reload: 570 | climate.reload() 571 | assert dict(climate.settings) == expected 572 | 573 | 574 | def test_bad_config_recovery(mock_empty_os_environ): 575 | """Check that parsers that cause errors can recover correctly.""" 576 | 577 | def check(d): 578 | if d and "wrong" in d: 579 | raise KeyError("Invalid config") 580 | return d 581 | 582 | climate = core.Climate(prefix="this", settings_file_suffix="suffix", parser=check) 583 | assert dict(climate.settings) == {} 584 | 585 | # Try to set incorrect config 586 | with pytest.raises(KeyError): 587 | climate.update({"wrong": 2}) 588 | assert dict(climate.settings) == {}, "Setting should not have been updated" 589 | assert climate._updates == [], "No external data should have been set." 590 | 591 | # Updating with other fields will still trigger the error 592 | climate.update({"right": 2}) 593 | assert dict(climate.settings) == {"right": 2} 594 | assert climate._updates == [{"right": 2}], "External data should have been set." 595 | 596 | 597 | def test_temporary_changes(): 598 | """Test that temporary changes settings context manager works. 599 | 600 | Within the context, settings should be changeable. After exit, the original 601 | settings should be restored. 602 | 603 | """ 604 | climate = core.Climate() 605 | climate.update({"a": 1}) 606 | with climate.temporary_changes(): 607 | # Change the settings within the context 608 | climate.update({"a": 2, "b": 2}) 609 | climate.settings_files.append("test") 610 | assert climate.settings["a"] == 2 611 | assert len(climate.settings_files) == 1 612 | # Check that outside of the context the settings are back to their old state. 
613 | assert climate.settings["a"] == 1 614 | assert len(climate.settings_files) == 0 615 | 616 | 617 | @pytest.mark.parametrize("use_method", [True, False]) 618 | @pytest.mark.parametrize("option_name", ["config", "settings"]) 619 | @pytest.mark.parametrize("mode", ["config", "noconfig", "wrongfile"]) 620 | def test_cli_utils( 621 | mock_empty_os_environ, mock_settings_file, mode, option_name, use_method 622 | ): 623 | """Check that cli utils work.""" 624 | climate = core.Climate(prefix="TEST_STUFF") 625 | # test equality here as _data is not only NoneType but also a proxy so "is" comparison would alwas evaluate to false. 626 | assert isinstance(climate._data, type(None)) 627 | 628 | if use_method: 629 | opt = cli_utils.click_settings_file_option(climate, option_name=option_name) 630 | else: 631 | opt = climate.click_settings_file_option(option_name=option_name) 632 | 633 | @click.command() 634 | @opt 635 | def tmp_cli(): 636 | pass 637 | 638 | runner = CliRunner() 639 | if mode == "config": 640 | args = ["--" + option_name, mock_settings_file[0]] 641 | result = runner.invoke(tmp_cli, args) 642 | assert dict(climate.settings) == mock_settings_file[1] 643 | assert result.exit_code == 0 644 | elif mode == "noconfig": 645 | args = [] 646 | result = runner.invoke(tmp_cli, args) 647 | assert dict(climate.settings) == {} 648 | assert result.exit_code == 0 649 | elif mode == "wrongfile": 650 | args = ["--" + option_name, "badlfkjasfkj"] 651 | result = runner.invoke(tmp_cli, args) 652 | assert result.exit_code == 2 653 | expected_output = ( 654 | "Usage: tmp-cli [OPTIONS]\n" 655 | "Try 'tmp-cli --help' for help.\n\n" 656 | "Error: Invalid value for '--{}' / '-{}': " 657 | "File 'badlfkjasfkj' does not exist." 658 | "\n" 659 | ).format(option_name, option_name[0]) 660 | assert result.output == expected_output 661 | else: # pragma: nocover 662 | assert False, "Incorrect mode" 663 | 664 | 665 | def test_settings_items(mock_empty_os_environ): 666 | """Test that item selection, assignment and deletion work as expected.""" 667 | climate = core.Climate() 668 | climate.update({"a": {"b": {"c": [1, 2, 3]}}, "d": [{"e": "f"}, {"g": "h"}]}) 669 | assert climate.settings["a"] == {"b": {"c": [1, 2, 3]}} 670 | assert climate.settings.a == {"b": {"c": [1, 2, 3]}} 671 | assert climate.settings.a.b.c[0] == 1 672 | 673 | # test assignment 674 | for value in [{"new": "data"}, "blaaa", [3, 4, 5]]: 675 | with pytest.raises(TypeError): 676 | climate.settings.a.b.c = value 677 | climate.update({"a": {"b": {"c": value}}}) 678 | assert climate.settings.a.b.c == value 679 | 680 | for value in [{"new": "data"}, "blaaa", 100]: # type: ignore[assignment] 681 | with pytest.raises(TypeError): 682 | climate.settings.a.b.c[0] = value 683 | climate.update({"a": {"b": {"c": [value]}}}) 684 | assert climate.settings.a.b.c[0] == value 685 | 686 | # test deletion 687 | with pytest.raises(TypeError): 688 | del climate.settings.a.b["c"] 689 | climate.update({"a": {"b": {"c": core.REMOVED}}}) 690 | assert climate.settings.a.b == {} 691 | climate.update() 692 | assert climate.settings.a.b == {} 693 | 694 | # test attribute deletion 695 | with pytest.raises(TypeError): 696 | del climate.settings.d[0].e 697 | climate.update({"d": [{"e": core.REMOVED}]}) 698 | assert climate.settings.d == [{}, {"g": "h"}] 699 | climate.update() 700 | assert climate.settings.d == [{}, {"g": "h"}] 701 | 702 | # test sequence item deletion 703 | climate.update({"d": [core.REMOVED]}) 704 | assert climate.settings.d == [{"g": "h"}] 705 | climate.update() 706 | 
assert climate.settings.d == [{"g": "h"}]
707 | 
708 |     # test second deletion at index to make sure that it is applied after the previous deletion
709 |     climate.update({"d": [core.REMOVED]})
710 |     assert climate.settings.d == []
711 |     climate.update()
712 |     assert climate.settings.d == []
713 | 
714 | 
715 | @pytest.mark.parametrize("update", [False, "manual", "env"])
716 | def test_setup_logging(monkeypatch, update, mock_empty_os_environ):
717 |     """Check that the setup_logging method initializes the logger and respects updates."""
718 |     mock_dict_config = MagicMock()
719 |     monkeypatch.setattr(
720 |         "climatecontrol.core.logging_config.dictConfig", mock_dict_config
721 |     )
722 |     if update == "env":
723 |         os.environ["TEST_STUFF_LOGGING__ROOT__LEVEL"] = "DEBUG"
724 |     climate = core.Climate(prefix="TEST_STUFF")
725 |     if update == "manual":
726 |         climate.update({"logging": {"root": {"level": "DEBUG"}}})
727 |     climate.update()
728 |     climate.setup_logging()
729 |     assert mock_dict_config.call_count == 1
730 |     assert (
731 |         "handlers" in mock_dict_config.call_args[0][0]["root"]
732 |     ), "Expected default logging configuration to be updated, not overwritten."
733 |     # The root level should reflect the update when one was made, otherwise the default.
734 |     expected_level = "DEBUG" if update else "INFO"
735 |     assert (
736 |         mock_dict_config.call_args[0][0]["root"]["level"] == expected_level
737 |     ), "Expected the root logger level to match the configured or default level."
738 | 
739 | 
740 | def test_update_log(caplog, mock_empty_os_environ, mock_settings_file):
741 |     """Check that settings updates are reported in the update log."""
742 |     settings_file_path, expected = mock_settings_file
743 |     climate = core.Climate(prefix="TEST_STUFF", settings_files=(settings_file_path,))
744 |     assert climate.update_log == "", "before updating, the update log should be empty"
745 |     climate.update({"a": core.REMOVED, "b": 2})
746 |     lines = climate.update_log.split("\n")
747 |     assert len(lines) == 4
748 |     expected_lines = [
749 |         "loaded testgroup.testvar from {}".format(settings_file_path),
750 |         "loaded othergroup.blabla from {}".format(settings_file_path),
751 |         "removed a from external",
752 |         "loaded b from external",
753 |     ]
754 |     assert set(lines) == set(expected_lines), "Unexpected lines in update_log"
755 | 
--------------------------------------------------------------------------------
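Taken together, these tests trace the public workflow end to end: namespaced
environment variables, settings files, programmatic updates and logging setup
all feed the same merged settings object. The sketch below strings the pieces
together; the "MYAPP" prefix, file name and keys are invented for illustration
and do not come from the test suite:

    import os
    from pathlib import Path
    from climatecontrol.core import Climate

    # A made-up settings file plus a namespaced environment variable.
    Path("myapp_settings.yaml").write_text("database:\n  name: myapp\n")
    os.environ["MYAPP_DATABASE__HOST"] = "localhost"  # "__" creates nested keys

    climate = Climate(prefix="MYAPP", settings_files=["myapp_settings.yaml"])
    climate.update({"database": {"port": 5432}})  # programmatic updates are applied last

    assert climate.settings.database.name == "myapp"      # from the settings file
    assert climate.settings.database.host == "localhost"  # from the environment
    assert climate.settings.database.port == 5432         # from the update

    climate.setup_logging()  # merges an optional "logging" section into the package defaults

    with climate.temporary_changes():
        climate.update({"database": {"host": "test-db"}})
        assert climate.settings.database.host == "test-db"
    assert climate.settings.database.host == "localhost"  # restored on context exit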