├── tests ├── __init__.py ├── conftest.py ├── test_patterns.py ├── test_paths.py ├── test_toml_loading.py ├── test_functional.py ├── test_node.py ├── test_mapping.py └── test_config.py ├── pytest.ini ├── .gitignore ├── docs ├── changes.rst ├── license.rst ├── index.rst ├── conf.py ├── installation.rst ├── api.rst ├── development.rst ├── Makefile ├── mapping.rst ├── patterns.rst └── use.rst ├── .readthedocs.yml ├── .carthorse.yml ├── configurator ├── __init__.py ├── _toml.py ├── parsers.py ├── patterns.py ├── merge.py ├── mapping.py ├── path.py ├── config.py └── node.py ├── .circleci └── config.yml ├── conftest.py ├── LICENSE.txt ├── setup.py ├── CHANGELOG.rst └── README.rst /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | addopts = -p no:doctest 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /bin 2 | /*.egg-info 3 | /include 4 | /lib 5 | .coverage* 6 | _build/ 7 | /.cache 8 | -------------------------------------------------------------------------------- /docs/changes.rst: -------------------------------------------------------------------------------- 1 | .. py:currentmodule:: configurator 2 | 3 | .. include:: ../CHANGELOG.rst 4 | -------------------------------------------------------------------------------- /docs/license.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | License 3 | ======= 4 | 5 | .. literalinclude:: ../LICENSE.txt 6 | 7 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from testfixtures import TempDirectory 3 | 4 | 5 | @pytest.fixture 6 | def dir(): 7 | with TempDirectory(encoding='ascii') as d: 8 | yield d 9 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-22.04 5 | tools: 6 | python: "3" 7 | 8 | python: 9 | install: 10 | - method: pip 11 | path: . 
12 | extra_requirements: 13 | - build 14 | sphinx: 15 | fail_on_warning: true 16 | -------------------------------------------------------------------------------- /.carthorse.yml: -------------------------------------------------------------------------------- 1 | carthorse: 2 | version-from: setup.py 3 | when: 4 | - version-not-tagged 5 | actions: 6 | - run: "pip install -e .[build]" 7 | - run: "python setup.py sdist bdist_wheel" 8 | - run: "twine upload -u __token__ -p $PYPI_TOKEN dist/*" 9 | - create-tag 10 | -------------------------------------------------------------------------------- /configurator/__init__.py: -------------------------------------------------------------------------------- 1 | from .config import Config 2 | from .merge import default_mergers 3 | from .mapping import source, target, convert, required, if_supplied, value 4 | 5 | __all__ = ( 6 | 'Config', 7 | 'source', 8 | 'target', 9 | 'convert', 10 | 'required', 11 | 'if_supplied', 12 | 'value', 13 | 'default_mergers', 14 | ) 15 | -------------------------------------------------------------------------------- /configurator/_toml.py: -------------------------------------------------------------------------------- 1 | import io 2 | 3 | try: 4 | import tomllib 5 | except ImportError: 6 | import tomli as tomllib 7 | 8 | 9 | def load(f, *, parse_float=float): 10 | # wrapper around tomllib.load to be more forgiving of streams opened in text mode 11 | if isinstance(f, io.TextIOWrapper): 12 | return tomllib.load(f.buffer, parse_float=parse_float) 13 | elif isinstance(f, io.StringIO): 14 | return tomllib.loads(f.getvalue(), parse_float=parse_float) 15 | else: 16 | return tomllib.load(f, parse_float=parse_float) 17 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst 2 | 3 | Documentation 4 | ~~~~~~~~~~~~~ 5 | 6 | .. toctree:: 7 | :maxdepth: 3 8 | 9 | use.rst 10 | mapping.rst 11 | patterns.rst 12 | 13 | If you're looking for a description of a particular class or method, please see 14 | the API reference: 15 | 16 | .. toctree:: 17 | :maxdepth: 1 18 | 19 | api.rst 20 | 21 | For details of how to install the package or get involved in its 22 | development, please see the sections below: 23 | 24 | .. 
toctree:: 25 | :maxdepth: 1 26 | 27 | installation.rst 28 | development.rst 29 | changes.rst 30 | license.rst 31 | 32 | Indices and tables 33 | ================== 34 | 35 | * :ref:`genindex` 36 | * :ref:`modindex` 37 | * :ref:`search` 38 | 39 | -------------------------------------------------------------------------------- /tests/test_patterns.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from testfixtures import compare 4 | 5 | from configurator import Config 6 | from configurator.patterns import load_with_extends 7 | 8 | 9 | class TestLoadWithExtends: 10 | 11 | def test_key_not_present(self, dir): 12 | path = dir.write('file.json', '{"key":"value"}') 13 | compare(load_with_extends(path, key='extends'), expected=Config({'key': 'value'})) 14 | 15 | def test_root(self, dir): 16 | path1 = dir.write('file1.json', '{"root": {"f1key":"f1value"}}') 17 | path2 = dir.write('file2.json', json.dumps({'root': { 18 | 'f2key': 'f2value', 19 | 'extends': path1, 20 | }})) 21 | compare(load_with_extends(path2, key='extends', root='root'), expected=Config({ 22 | 'f1key': 'k1value', 23 | 'f2key': 'f2value', 24 | })) 25 | -------------------------------------------------------------------------------- /configurator/parsers.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | from importlib import import_module 3 | 4 | 5 | class ParseError(Exception): 6 | """ 7 | The exception raised when an appropriate parser cannot be found for a config 8 | stream. 9 | """ 10 | 11 | 12 | class Parsers(defaultdict): 13 | 14 | # file extension: module name, method name 15 | supported = { 16 | 'json': ('json', 'load'), 17 | 'toml': ('configurator._toml', 'load'), 18 | 'yml': ('yaml', 'safe_load'), 19 | 'yaml': ('yaml', 'safe_load'), 20 | } 21 | 22 | def __missing__(self, extension): 23 | try: 24 | module_name, parser_name = self.supported[extension] 25 | except KeyError: 26 | raise ParseError('No parser found for {!r}'.format(extension)) 27 | else: 28 | module = import_module(module_name) 29 | return getattr(module, parser_name) 30 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2.1 2 | 3 | orbs: 4 | python: cjw296/python-ci@3.1 5 | 6 | common: &common 7 | jobs: 8 | - python/pip-run-tests: 9 | matrix: 10 | parameters: 11 | image: 12 | - cimg/python:3.6 13 | - cimg/python:3.11 14 | extras: 15 | - "[test]" 16 | - "[test,toml,yaml]" 17 | 18 | - python/coverage: 19 | name: coverage 20 | requires: 21 | - python/pip-run-tests 22 | 23 | - python/release: 24 | name: release 25 | config: .carthorse.yml 26 | requires: 27 | - coverage 28 | filters: 29 | branches: 30 | only: master 31 | 32 | workflows: 33 | push: 34 | <<: *common 35 | periodic: 36 | <<: *common 37 | triggers: 38 | - schedule: 39 | cron: "0 0 * * 5" 40 | filters: 41 | branches: 42 | only: master 43 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | from doctest import REPORT_NDIFF, ELLIPSIS 2 | 3 | import pytest 4 | from pyfakefs.pytest_plugin import fs_module as fs 5 | from sybil import Sybil 6 | from sybil.parsers.doctest import DocTestParser 7 | from sybil.parsers.codeblock import PythonCodeBlockParser 8 | from testfixtures import Replacer, TempDirectory 
9 | from testfixtures.sybil import FileParser 10 | 11 | 12 | @pytest.fixture() 13 | def tempdir(fs): 14 | with TempDirectory(path='/') as d: 15 | yield d 16 | 17 | 18 | @pytest.fixture(scope='module') 19 | def replace(): 20 | with Replacer() as replace: 21 | yield replace 22 | 23 | 24 | @pytest.fixture() 25 | def skip_no_yaml(): 26 | pytest.importorskip("yaml") 27 | 28 | 29 | pytest_collect_file = Sybil( 30 | parsers=[ 31 | DocTestParser(optionflags=REPORT_NDIFF|ELLIPSIS), 32 | PythonCodeBlockParser(), 33 | FileParser('tempdir') 34 | ], 35 | pattern='*.rst', 36 | fixtures=['fs', 'replace', 'tempdir', 'skip_no_yaml'], 37 | ).pytest() 38 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import datetime, os, pkg_resources 3 | 4 | intersphinx_mapping = { 5 | 'python': ('https://docs.python.org/3/', None), 6 | } 7 | 8 | extensions = [ 9 | 'sphinx.ext.autodoc', 10 | 'sphinx.ext.intersphinx' 11 | ] 12 | 13 | # General 14 | source_suffix = '.rst' 15 | master_doc = 'index' 16 | project = 'configurator' 17 | copyright = '2011-2014 Simplistix Ltd, 2016-%s Chris Withers' % datetime.datetime.now().year 18 | version = release = pkg_resources.get_distribution(project).version 19 | exclude_patterns = [ 20 | 'description.rst', 21 | '_build' 22 | ] 23 | pygments_style = 'sphinx' 24 | 25 | # Options for HTML output 26 | html_theme = 'furo' 27 | htmlhelp_basename = project+'doc' 28 | 29 | # Options for LaTeX output 30 | latex_documents = [ 31 | ('index',project+'.tex', project+u' Documentation', 32 | 'Chris Withers', 'manual'), 33 | ] 34 | 35 | nitpicky = True 36 | nitpick_ignore = [ 37 | ('py:func', 'configurator.merge.merge_dict') # private method, may go away soon? 38 | ] 39 | -------------------------------------------------------------------------------- /configurator/patterns.py: -------------------------------------------------------------------------------- 1 | from . import Config 2 | 3 | 4 | def load_with_extends(path, key='extends', root=None): 5 | """ 6 | Helper for the :ref:`"extends" ` pattern. 7 | 8 | :param path: 9 | The path of the configuration file to start with. 10 | :param key: 11 | The key to use to indicate that another file should be used as a base. 12 | :param root: 13 | If supplied, configuration is extracted from this key at the root of 14 | each configuration file that is loaded, provided it is present. If missing 15 | from any file, the whole configuration from that file is used instead. 16 | """ 17 | configs = [] 18 | while path: 19 | config = Config.from_path(path) 20 | if root is not None and root in config: 21 | config = config[root] 22 | configs.append(config) 23 | path = config.get(key) 24 | config = Config() 25 | for layer in reversed(configs): 26 | config.merge(layer) 27 | config.data.pop(key, None) 28 | return config 29 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. py:currentmodule:: configurator 2 | 3 | Installation 4 | ============ 5 | 6 | Configurator is available on the `Python Package Index`__ and can be installed 7 | with any tools for managing Python environments. The package has no hard 8 | dependencies beyond the standard library, but you will need extra libraries for most 9 | file formats from which you may want to read configuration information. 
As a result, 10 | you may wish to install Configurator with the appropriate extra requirement to meet 11 | your needs: 12 | 13 | __ https://pypi.org 14 | 15 | .. code-block:: bash 16 | 17 | pip install configurator[toml] 18 | pip install configurator[yaml] 19 | 20 | Configurator is also available as a conda package installable from `conda-forge`__: 21 | 22 | __ https://anaconda.org/conda-forge/configurator 23 | 24 | .. code-block:: bash 25 | 26 | conda install -c conda-forge configurator 27 | 28 | .. note:: 29 | 30 | Conda does not support the notion of "optional extras" so you will need to manually 31 | install the package(s) required to parse the config file formats you need. 32 | 33 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2011-2014 Simplistix Ltd, 2016 onwards Chris Withers 2 | 3 | Permission is hereby granted, free of charge, to any person 4 | obtaining a copy of this software and associated documentation 5 | files (the "Software"), to deal in the Software without restriction, 6 | including without limitation the rights to use, copy, modify, merge, 7 | publish, distribute, sublicense, and/or sell copies of the Software, 8 | and to permit persons to whom the Software is furnished to do so, 9 | subject to the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be 12 | included in all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 16 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS 18 | BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN 19 | ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /tests/test_paths.py: -------------------------------------------------------------------------------- 1 | from testfixtures import compare 2 | 3 | from configurator.mapping import source, target, required, convert, value 4 | from configurator.path import parse_text 5 | 6 | 7 | class TestPaths: 8 | 9 | def test_repr(self): 10 | compare(repr(source), expected="Path:source") 11 | compare(repr(target), expected="Path:target") 12 | 13 | def test_str(self): 14 | compare(str(source), expected="source") 15 | compare(str(target), expected="target") 16 | 17 | def test_repr_nested(self): 18 | compare(repr( 19 | required(convert(source['foo'].y, int)).insert(0).append().merge() 20 | ), expected=( 21 | "Path:required(convert(source['foo'].y, int)).insert(0).append().merge()" 22 | )) 23 | 24 | def test_str_nested(self): 25 | compare(str( 26 | required(convert(source['foo'].y, int)).insert(0).append().merge() 27 | ), expected=( 28 | "required(convert(source['foo'].y, int)).insert(0).append().merge()" 29 | )) 30 | 31 | def test_repr_value(self): 32 | compare(str(value(42)), expected="value(42)") 33 | 34 | def test_text_op(self): 35 | compare(str(parse_text('x.y.z')), expected='x.y.z') 36 | 37 | def test_convert_no_name(self): 38 | o = object() 39 | compare(str(convert(source, o)), expected=( 40 | "convert(source, {!r})".format(o) 41 | )) 42 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # See LICENSE.txt for license details. 2 | # Copyright (c) 2011-2014 Simplistix Ltd, 2016 onwards Chris Withers 3 | from setuptools import setup, find_packages 4 | 5 | setup( 6 | name='configurator', 7 | version='3.2.0', 8 | author='Chris Withers', 9 | author_email='chris@withers.org', 10 | license='MIT', 11 | description=( 12 | "A library for building a configuration store " 13 | "from one or more layered configuration sources" 14 | ), 15 | long_description=open('README.rst').read(), 16 | url='https://github.com/Simplistix/configurator', 17 | classifiers=[ 18 | 'Intended Audience :: Developers', 19 | 'License :: OSI Approved :: MIT License', 20 | 'Programming Language :: Python :: 3', 21 | ], 22 | packages=find_packages(exclude=["tests"]), 23 | zip_safe=False, 24 | include_package_data=True, 25 | python_requires=">=3.6", 26 | extras_require=dict( 27 | yaml=['pyyaml'], 28 | toml=['tomli; python_version < "3.11"'], 29 | test=[ 30 | 'jinja2', 31 | 'mock', 32 | 'pyfakefs', 33 | 'pytest', 34 | 'pytest-cov', 35 | 'requests', 36 | 'sybil', 37 | 'testfixtures', 38 | 'voluptuous; python_version >= "3.7"', 39 | 'voluptuous<0.14; python_version < "3.7"', 40 | ], 41 | build=['sphinx', 'furo', 'setuptools-git', 'twine', 'wheel'] 42 | ), 43 | ) 44 | -------------------------------------------------------------------------------- /configurator/merge.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | def merge_dict(context, source, target): 4 | result = target.copy() 5 | for key, source_value in source.items(): 6 | if key in result: 7 | target_value = result[key] 8 | else: 9 | target_value = type(source_value)() 10 | try: 11 | value = context.merge(source_value, target_value) 12 | except TypeError: 13 | # can't merge, so overwrite 14 | value = source_value 15 | result[key] = value 16 | return result 17 | 18 | 19 | def merge_list(context, source, target): 20 | return target + 
source 21 | 22 | 23 | class MergeableDict(dict): 24 | 25 | def __add__(self, other): 26 | result = self.copy() 27 | result.update(other) 28 | return result 29 | 30 | 31 | default_mergers = MergeableDict({ 32 | dict: merge_dict, 33 | list: merge_list, 34 | }) 35 | 36 | 37 | class MergeContext: 38 | 39 | mergers = default_mergers 40 | 41 | def __init__(self, mergers=None): 42 | if mergers is not None: 43 | self.mergers = mergers 44 | 45 | def merge(self, source, target): 46 | source_type = type(source) 47 | target_type = type(target) 48 | merger = self.mergers.get(target_type) 49 | if source_type is not target_type or merger is None: 50 | raise TypeError('Cannot merge {} with {}'.format( 51 | source_type, target_type 52 | )) 53 | return merger(self, source, target) 54 | -------------------------------------------------------------------------------- /docs/api.rst: -------------------------------------------------------------------------------- 1 | API Reference 2 | ============= 3 | 4 | Configuration 5 | ------------- 6 | 7 | .. autoclass:: configurator.Config 8 | :members: 9 | :member-order: bysource 10 | :show-inheritance: 11 | :special-members: 12 | :exclude-members: __weakref__, __init__ 13 | 14 | 15 | .. autoclass:: configurator.node.ConfigNode 16 | :members: 17 | :member-order: bysource 18 | :special-members: 19 | :exclude-members: __init__ 20 | 21 | .. autoclass:: configurator.parsers.ParseError 22 | 23 | 24 | Mapping and Merging 25 | ------------------- 26 | 27 | .. attribute:: configurator.source 28 | 29 | The root generative source :class:`~configurator.path.Path` for 30 | creating :doc:`mappings `. 31 | 32 | .. attribute:: configurator.target 33 | 34 | The root generative target :class:`~configurator.path.Path` for 35 | creating :doc:`mappings `. 36 | 37 | .. autofunction:: configurator.convert 38 | 39 | .. autofunction:: configurator.required 40 | 41 | .. autofunction:: configurator.if_supplied 42 | 43 | .. autofunction:: configurator.value 44 | 45 | .. autoclass:: configurator.path.Path 46 | :members: 47 | :member-order: bysource 48 | :special-members: 49 | :exclude-members: __weakref__, __str__, __repr__, __init__ 50 | 51 | .. attribute:: configurator.default_mergers 52 | 53 | The default set of mergers, which recursively merge :class:`dicts ` 54 | using the union of their keys and merge :class:`lists ` by appending 55 | the contents of the new list to the existing list. 56 | 57 | 58 | Patterns of use 59 | --------------- 60 | 61 | .. automodule:: configurator.patterns 62 | :members: 63 | -------------------------------------------------------------------------------- /docs/development.rst: -------------------------------------------------------------------------------- 1 | Development 2 | =========== 3 | 4 | .. highlight:: bash 5 | 6 | The latest development version of the documentation can be found here: 7 | 8 | http://configurator.readthedocs.org/en/latest/ 9 | 10 | If you wish to contribute to this project, then you should fork the 11 | repository found here: 12 | 13 | https://github.com/Simplistix/configurator/ 14 | 15 | Once that has been done and you have a checkout, you can follow these 16 | instructions to perform various development tasks: 17 | 18 | Setting up a virtualenv 19 | ----------------------- 20 | 21 | The recommended way to set up a development environment is to turn 22 | your checkout into a virtualenv and then install the package in 23 | editable form as follows:: 24 | 25 | $ virtualenv . 
26 | $ bin/pip install -U -e .[test,build] 27 | 28 | Running the tests 29 | ----------------- 30 | 31 | Once you've set up a virtualenv, the tests can be run as follows:: 32 | 33 | $ bin/pytest 34 | 35 | Building the documentation 36 | -------------------------- 37 | 38 | The Sphinx documentation is built by doing the following from the 39 | directory containing setup.py:: 40 | 41 | $ source bin/activate 42 | $ cd docs 43 | $ make html 44 | 45 | To check that the description that will be used on PyPI renders properly, 46 | do the following:: 47 | 48 | $ python setup.py --long-description | rst2html.py > desc.html 49 | 50 | The resulting ``desc.html`` should be checked by opening in a browser. 51 | 52 | To check that the README that will be used on GitHub renders properly, 53 | do the following:: 54 | 55 | $ cat README.rst | rst2html.py > readme.html 56 | 57 | The resulting ``readme.html`` should be checked by opening in a browser. 58 | 59 | Making a release 60 | ---------------- 61 | 62 | To make a release, just update the version in ``setup.py``, 63 | update the change log, 64 | and push to https://github.com/Simplistix/configurator 65 | and Carthorse should take care of the rest. 66 | -------------------------------------------------------------------------------- /tests/test_toml_loading.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from testfixtures import compare 3 | 4 | from configurator import Config 5 | 6 | 7 | def test_load_toml_from_path_implicit_parser(tmp_path): 8 | path = tmp_path / 'test.toml' 9 | path.write_bytes(b'k = "v"') 10 | config = Config.from_path(path) 11 | compare(config.k, "v") 12 | 13 | 14 | def test_load_toml_from_path_explicit_parser(tmp_path): 15 | path = tmp_path / 'test.toml' 16 | path.write_bytes(b'k = "v"') 17 | parser = Config.parsers['toml'] 18 | config = Config.from_path(path, parser) 19 | compare(config.k, "v") 20 | 21 | 22 | def test_load_toml_from_byte_stream_implicit_parser(tmp_path): 23 | path = tmp_path / 'test.toml' 24 | path.write_bytes(b'k = "v"') 25 | with path.open(mode="rb") as stream: 26 | config = Config.from_stream(stream) 27 | compare(config.k, "v") 28 | 29 | 30 | def test_load_toml_from_byte_stream_explicit_parser(tmp_path): 31 | path = tmp_path / 'test.toml' 32 | path.write_bytes(b'k = "v"') 33 | parser = Config.parsers['toml'] 34 | with path.open(mode="rb") as stream: 35 | config = Config.from_stream(stream, parser) 36 | compare(config.k, "v") 37 | 38 | 39 | def test_load_toml_from_text_stream_implicit_parser(tmp_path): 40 | path = tmp_path / 'test.toml' 41 | path.write_bytes(b'k = "v"') 42 | with path.open(mode="rt") as stream: 43 | config = Config.from_stream(stream) 44 | compare(config.k, "v") 45 | 46 | 47 | def test_load_toml_from_text_stream_explicit_parser(tmp_path): 48 | path = tmp_path / 'test.toml' 49 | path.write_bytes(b'k = "v"') 50 | parser = Config.parsers['toml'] 51 | with path.open(mode="rt") as stream: 52 | config = Config.from_stream(stream, parser) 53 | compare(config.k, "v") 54 | 55 | 56 | def test_load_toml_from_text(): 57 | parser = Config.parsers['toml'] 58 | config = Config.from_text('k = "v"', parser) 59 | compare(config.k, "v") 60 | 61 | 62 | def test_load_toml_from_bytes(): 63 | parser = Config.parsers['toml'] 64 | config = Config.from_text(b'k = "v"', parser) 65 | compare(config.k, "v") 66 | -------------------------------------------------------------------------------- /configurator/mapping.py: 
-------------------------------------------------------------------------------- 1 | from .path import ( 2 | Path, parse_text, ConvertOp, RequiredOp, NotPresent, IfSuppliedOp, ValueOp 3 | ) 4 | 5 | 6 | def load(data, path): 7 | path = parse_text(path) 8 | for op in path.ops: 9 | if isinstance(data, NotPresent): 10 | op.not_present(data) 11 | else: 12 | data = op.get(data) 13 | return data 14 | 15 | 16 | def convert(source, callable_): 17 | """ 18 | A :doc:`mapping ` operation that indicates the source value 19 | should be converted by calling ``callable_`` with the original value 20 | and then using that result from that point in the mapping operation 21 | onwards. 22 | """ 23 | source = parse_text(source) 24 | return source._extend(ConvertOp(callable_)) 25 | 26 | 27 | def required(source): 28 | """ 29 | A :doc:`mapping ` operation that indicates the source value 30 | is required. If it is not present, the exception that occurred when 31 | trying to obtain it will be raised. 32 | """ 33 | source = parse_text(source) 34 | return source._extend(RequiredOp()) 35 | 36 | 37 | def if_supplied(source, false_values=(None, '')): 38 | """ 39 | A :doc:`mapping ` operation that indicates the source value 40 | should be treated as not present if its value is in the supplied 41 | list of ``false_values``. 42 | """ 43 | source = parse_text(source) 44 | return source._extend(IfSuppliedOp(false_values)) 45 | 46 | 47 | def value(value): 48 | """ 49 | A :doc:`mapping ` operation that provides a literal source value. 50 | """ 51 | return Path('', ValueOp(value)) 52 | 53 | 54 | def store(data, path, value, merge_context=None): 55 | path = parse_text(path) 56 | if not path.ops: 57 | raise TypeError('Cannot store at root') 58 | stack = [data] 59 | for op in path.ops[:-1]: 60 | stack.append(op.ensure(stack[-1])) 61 | if not isinstance(value, NotPresent): 62 | data = path.ops[-1].set(stack[-1], value, merge_context) 63 | if data is not None: 64 | # uh oh, we have to replace the upstream object: 65 | if len(stack) < 2: 66 | stack[0] = data 67 | else: 68 | path.ops[-2].set(stack[-2], data, merge_context) 69 | return stack[0] 70 | 71 | 72 | source = Path('source') 73 | target = Path('target') 74 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | 9 | # Internal variables. 10 | PAPEROPT_a4 = -D latex_paper_size=a4 11 | PAPEROPT_letter = -D latex_paper_size=letter 12 | ALLSPHINXOPTS = -d _build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
13 | 14 | .PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest 15 | 16 | help: 17 | @echo "Please use \`make ' where is one of" 18 | @echo " html to make standalone HTML files" 19 | @echo " dirhtml to make HTML files named index.html in directories" 20 | @echo " pickle to make pickle files" 21 | @echo " json to make JSON files" 22 | @echo " htmlhelp to make HTML files and a HTML help project" 23 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 24 | @echo " changes to make an overview of all changed/added/deprecated items" 25 | @echo " linkcheck to check all external links for integrity" 26 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 27 | 28 | clean: 29 | -rm -rf _build/* 30 | 31 | html: 32 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) _build/html 33 | @echo 34 | @echo "Build finished. The HTML pages are in _build/html." 35 | 36 | dirhtml: 37 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) _build/dirhtml 38 | @echo 39 | @echo "Build finished. The HTML pages are in _build/dirhtml." 40 | 41 | pickle: 42 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) _build/pickle 43 | @echo 44 | @echo "Build finished; now you can process the pickle files." 45 | 46 | json: 47 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) _build/json 48 | @echo 49 | @echo "Build finished; now you can process the JSON files." 50 | 51 | htmlhelp: 52 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) _build/htmlhelp 53 | @echo 54 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 55 | ".hhp project file in _build/htmlhelp." 56 | 57 | latex: 58 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) _build/latex 59 | @echo 60 | @echo "Build finished; the LaTeX files are in _build/latex." 61 | @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ 62 | "run these through (pdf)latex." 63 | 64 | changes: 65 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) _build/changes 66 | @echo 67 | @echo "The overview file is in _build/changes." 68 | 69 | linkcheck: 70 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) _build/linkcheck 71 | @echo 72 | @echo "Link check complete; look for any errors in the above output " \ 73 | "or in _build/linkcheck/output.txt." 74 | 75 | doctest: 76 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) _build/doctest 77 | @echo "Testing of doctests in the sources finished, look at the " \ 78 | "results in _build/doctest/output.txt." 
79 | -------------------------------------------------------------------------------- /tests/test_functional.py: -------------------------------------------------------------------------------- 1 | from argparse import ArgumentParser 2 | 3 | from testfixtures import compare 4 | from configurator import Config 5 | from configurator.mapping import target, convert 6 | import pytest 7 | 8 | 9 | class TestFunctional: 10 | 11 | def test_layered(self, dir): 12 | # defaults 13 | config = Config({ 14 | 'database': {'user': 'foo'}, 15 | 'special': False 16 | }) 17 | # from system file: 18 | path = dir.write('etc/myapp.json', '{"special": true}') 19 | config.merge(Config.from_path(path)) 20 | # from user file: 21 | path = dir.write('home/user/myapp.json', '{"database": {"password": "123"}}') 22 | config.merge(Config.from_path(path)) 23 | # end result: 24 | compare(config.database.user, expected='foo') 25 | compare(config.database.password, expected='123') 26 | compare(config.special, expected=True) 27 | 28 | def test_defaults_and_argparse(self): 29 | parser = ArgumentParser() 30 | parser.add_argument('--first-url') 31 | parser.add_argument('--attempts', type=int, default=2) 32 | parser.add_argument('--bool-a', action='store_true') 33 | parser.add_argument('--bool-b', action='store_false') 34 | 35 | args = parser.parse_args(['--first-url', 'override_url', '--bool-a']) 36 | compare(args.bool_b, expected=True) 37 | 38 | config = Config({'urls': ['default_url'], 'bool_b': False}) 39 | 40 | config.merge(args, { 41 | 'first_url': target['urls'].insert(0), 42 | 'attempts': target['attempts'], 43 | 'bool_a': target['bool_a'], 44 | 'bool_b': target['bool_b'], 45 | }) 46 | 47 | compare(config.urls, expected=['override_url', 'default_url']) 48 | compare(config.attempts, expected=2) 49 | compare(config.bool_a, expected=True) 50 | compare(config.bool_b, expected=True) 51 | 52 | def test_defaults_and_env(self): 53 | config = Config({'present': 'dp', 'absent': 'da'}) 54 | 55 | environ = {'ENV_PRESENT': '1'} 56 | 57 | config.merge(environ, { 58 | convert('ENV_PRESENT', int): 'present', 59 | convert('ENV_ABSENT', int): 'absent', 60 | }) 61 | 62 | compare(config.present, expected=1) 63 | compare(config.absent, expected='da') 64 | 65 | def test_overlay(self, dir): 66 | pytest.importorskip("yaml") 67 | path1 = dir.write('etc/myapp.yml', ''' 68 | base: 1 69 | user: bad 70 | file: bad 71 | ''') 72 | 73 | path2 = dir.write('home/.myapp.yml', ''' 74 | user: 2 75 | file: bad-user 76 | ''') 77 | 78 | path3 = dir.write('app.yml', ''' 79 | file: 3 80 | ''') 81 | 82 | config = Config.from_path(path1) 83 | config.merge(Config.from_path(path2)) 84 | config.merge(Config.from_path(path3)) 85 | 86 | compare(config.base, expected=1) 87 | compare(config.user, expected=2) 88 | compare(config.file, expected=3) 89 | 90 | 91 | def test_fake_fs(fs): 92 | pytest.importorskip("yaml") 93 | fs.create_file('/foo/bar.yml', contents='foo: 1\n') 94 | config = Config.from_path('/foo/bar.yml') 95 | compare(config.foo, expected=1) 96 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | Changes 2 | ======= 3 | 4 | 3.2.0 (13 Sep 2023) 5 | ------------------- 6 | 7 | - Use the standard library's ``tomllib`` module on Python 3.11 and above and 8 | switch to `tomli`__ for earlier versions. 9 | 10 | __ https://github.com/hukkin/tomli 11 | 12 | Thanks to Wim Glenn for the work! 
13 | 14 | 3.1.0 (28 Nov 2022) 15 | ------------------- 16 | 17 | - Add support for deleting items and attributes via the :class:`~.node.ConfigNode` 18 | interface. 19 | 20 | 3.0.0 (20 Sep 2022) 21 | ------------------- 22 | 23 | - Drop Python 2 support. 24 | 25 | - :meth:`~.node.ConfigNode.get` can now be used without specifying a name 26 | to return the value of nodes representing simple attributes. 27 | 28 | - Explicit support for :class:`pathlib.Path` objects. 29 | 30 | 31 | 2.6.0 (5 Aug 2020) 32 | ------------------ 33 | 34 | - :class:`~.node.ConfigNode` instances can now be :meth:`merged `. 35 | 36 | - Add support for an optional root in each file when using the 37 | :ref:`"extends" ` pattern. 38 | 39 | 2.5.1 (4 Jun 2020) 40 | ------------------ 41 | 42 | - Change default `false_values` for :meth:`if_supplied` to be a :class:`tuple` rather than 43 | a :class:`frozenset`, fixing a bug that occurred when source data wasn't hashable. 44 | 45 | 2.5.0 (1 Jun 2020) 46 | ------------------ 47 | 48 | - Move the code for the "Config file that extends another config file" pattern into 49 | a helper function in :func:`configurator.patterns.load_with_extends`. 50 | 51 | 2.4.0 (31 May 2020) 52 | ------------------- 53 | 54 | - Allow the list of values considered false by :meth:`if_supplied` to be specified. 55 | 56 | - :meth:`if_supplied` no longer considers ``False`` to be false, as when present, that's 57 | often an explicitly provided value. 58 | 59 | 2.3.0 (27 May 2020) 60 | ------------------- 61 | 62 | - :class:`Config` instances can now be pickled. 63 | 64 | 2.2.0 (25 May 2020) 65 | ------------------- 66 | 67 | - :func:`value` has been added to allow literal values to be used in the left 68 | side of mappings passed to :meth:`Config.merge`. 69 | 70 | 2.1.0 (25 May 2020) 71 | ------------------- 72 | 73 | - Configuration values may now be set using attribute or item setting on 74 | :class:`~.node.ConfigNode` instances. 75 | 76 | - :meth:`~.node.ConfigNode.node` can be used to obtain or create a 77 | :class:`~.node.ConfigNode` from a dotted path and will give you a node even 78 | for a value of a :class:`dict` or item in a :class:`list`. 79 | 80 | - :class:`~.node.ConfigNode` instances now have a :meth:`~.node.ConfigNode.set` 81 | method that can be used to replace the value of that part of the configuration, 82 | regardless of whether it is a container, list item or dictionary value. 83 | 84 | 2.0.0 (15 Apr 2020) 85 | ------------------- 86 | 87 | - Performance improvements when importing parsers. 88 | 89 | - Removed the ability to provide new parsers using `pkg_resources` 90 | entry points. 91 | 92 | 1.3.0 (29 Jan 2020) 93 | ------------------- 94 | 95 | - Add :meth:`Config.from_env` class method to help with extracting 96 | large numbers of environment variables into configuration. 97 | 98 | 1.2.0 (29 May 2019) 99 | ------------------- 100 | 101 | - Enable the context manager returned by :meth:`Config.push` to return 102 | the state of a global config object to what it was before :meth:`~Config.push` 103 | was called. 104 | 105 | 1.1.0 (29 May 2019) 106 | ------------------- 107 | 108 | - Add support for :meth:`pushing ` and :meth:`popping ` 109 | config data onto a global :class:`Config`. 110 | 111 | 1.0.0 (4 Apr 2019) 112 | ------------------ 113 | 114 | - Support for optional configuration files in :meth:`Config.from_path`. 115 | 116 | - Add :meth:`if_supplied` mapping operation. 117 | 118 | - Fully documented.
119 | 120 | 0.5.0 (15 Mar 2019) 121 | --------------------- 122 | 123 | - Initial release without docs. 124 | 125 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | 2 | configurator 3 | ============ 4 | 5 | |CircleCI|_ |Docs|_ 6 | 7 | .. |CircleCI| image:: https://circleci.com/gh/simplistix/configurator/tree/master.svg?style=shield 8 | .. _CircleCI: https://circleci.com/gh/simplistix/configurator/tree/master 9 | 10 | .. |Docs| image:: https://readthedocs.org/projects/configurator/badge/?version=latest 11 | .. _Docs: http://configurator.readthedocs.org/en/latest/ 12 | 13 | This is a Python library for building a configuration store 14 | from one or more layered configuration sources. 15 | These are most commonly files, with YAML, TOML and JSON support included 16 | and other formats easily added. 17 | The sources don't have to be files, and support is included for both environment 18 | variables and command line options. 19 | 20 | In addition to an easy to use interface, configuration information is also made available 21 | as nested, simple python data types so that you can validate the schema of your configuration 22 | using the tool of your choice. 23 | 24 | Quickstart 25 | ~~~~~~~~~~ 26 | 27 | .. invisible-code-block: python 28 | 29 | fs.create_file('/etc/my_app/config.yaml', 30 | contents='cache:\n location: /var/my_app/\n') 31 | fs.create_dir('/var/logs/myapp/') 32 | replace('os.environ.MYAPP_THREADS', '2', strict=False) 33 | replace('os.environ.MYAPP_CACHE_DIRECTORY', '/var/logs/myapp/', strict=False) 34 | replace('sys.argv', ['myapp.py', '--threads', '3', '--max-files', '200']) 35 | from pprint import pprint 36 | 37 | To install the library, go for: 38 | 39 | .. code-block:: bash 40 | 41 | pip install configurator[yaml,toml] 42 | 43 | Here's how you would handle a layered set of defaults, system-wide config 44 | and then optional per-user config: 45 | 46 | .. code-block:: python 47 | 48 | 49 | from configurator import Config 50 | 51 | defaults = Config({ 52 | 'cache': { 53 | 'location': '/tmp/my_app', 54 | 'max_files': 100, 55 | }, 56 | 'banner': 'default banner', 57 | 'threads': 1, 58 | }) 59 | system = Config.from_path('/etc/my_app/config.yaml') 60 | user = Config.from_path('~/.my_app.yaml', optional=True) 61 | config = defaults + system + user 62 | 63 | Now, if we wanted configuration from the environment and command line 64 | arguments to override those provided in configuration files, we could do so 65 | as follows: 66 | 67 | .. code-block:: python 68 | 69 | import os 70 | from argparse import ArgumentParser 71 | from configurator import convert, target, required 72 | 73 | config.merge(os.environ, { 74 | convert('MYAPP_THREADS', int): 'threads', 75 | required('MYAPP_CACHE_DIRECTORY'): 'cache.location', 76 | }) 77 | 78 | parser = ArgumentParser() 79 | parser.add_argument('--threads', type=int) 80 | parser.add_argument('--max-files', type=int) 81 | args = parser.parse_args() 82 | 83 | config.merge(args, { 84 | 'threads': 'threads', 85 | 'max_files': 'cache.max_files', 86 | }) 87 | 88 | To check the configuration we've accumulated is sensible we can use a data validation library 89 | such as `Voluptuous`__: 90 | 91 | __ https://github.com/alecthomas/voluptuous 92 | 93 | .. 
code-block:: python 94 | 95 | from os.path import exists 96 | from voluptuous import Schema, All, Required, PathExists 97 | 98 | schema = Schema({ 99 | 'cache': {'location': All(str, PathExists()), 'max_files': int}, 100 | 'banner': Required(str), 101 | 'threads': Required(int), 102 | }) 103 | 104 | schema(config.data) 105 | 106 | So, with all of the above, we could use the following sources of configuration: 107 | 108 | >>> import os, sys 109 | >>> print(open('/etc/my_app/config.yaml').read()) 110 | cache: 111 | location: /var/my_app/ 112 | 113 | >>> os.environ['MYAPP_THREADS'] 114 | '2' 115 | >>> os.environ['MYAPP_CACHE_DIRECTORY'] 116 | '/var/logs/myapp/' 117 | >>> sys.argv 118 | ['myapp.py', '--threads', '3', '--max-files', '200'] 119 | 120 | With the above sources of configuration, we'd end up with a configuration store that we can use as 121 | follows: 122 | 123 | >>> config.cache.location 124 | '/var/logs/myapp/' 125 | >>> config.cache.max_files 126 | 200 127 | >>> config.banner 128 | 'default banner' 129 | >>> config.threads 130 | 3 131 | -------------------------------------------------------------------------------- /configurator/path.py: -------------------------------------------------------------------------------- 1 | class NotPresent(Exception): pass 2 | 3 | 4 | class Op: 5 | 6 | name = 'op' 7 | 8 | def get(self, data): 9 | raise TypeError('Cannot use %s() in source' % self.name) 10 | 11 | def ensure(self, *args): 12 | raise TypeError('Cannot use %s() as target' % self.name) 13 | 14 | set = ensure 15 | 16 | def not_present(self, data): 17 | return data 18 | 19 | 20 | class ItemOp(Op): 21 | 22 | def __init__(self, text): 23 | self.text = text 24 | 25 | def get(self, data): 26 | try: 27 | return data[self.text] 28 | except (KeyError, IndexError): 29 | return NotPresent(self.text) 30 | 31 | def ensure(self, data): 32 | try: 33 | return data[self.text] 34 | except KeyError: 35 | data[self.text] = value = {} 36 | return value 37 | 38 | def set(self, data, value, _): 39 | data[self.text] = value 40 | 41 | def str(self, base): 42 | return '{}[{!r}]'.format(base, self.text) 43 | 44 | 45 | class AttrOp(Op): 46 | 47 | def __init__(self, text): 48 | self.text = text 49 | 50 | def get(self, data): 51 | return getattr(data, self.text, NotPresent(self.text)) 52 | 53 | def ensure(self, data): 54 | return getattr(data, self.text) 55 | 56 | def set(self, data, value, _): 57 | setattr(data, self.text, value) 58 | 59 | def str(self, base): 60 | return '{}.{}'.format(base, self.text) 61 | 62 | 63 | class TextOp(Op): 64 | 65 | def __init__(self, text): 66 | self.text = text 67 | 68 | def get(self, data): 69 | getitem = getattr(data, '__getitem__', None) 70 | if getitem is None: 71 | return getattr(data, self.text, NotPresent(self.text)) 72 | else: 73 | try: 74 | return getitem(self.text) 75 | except KeyError: 76 | return NotPresent(self.text) 77 | 78 | def ensure(self, data): 79 | getitem = getattr(data, '__getitem__', None) 80 | if getitem is None: 81 | return getattr(data, self.text) 82 | else: 83 | try: 84 | return getitem(self.text) 85 | except KeyError: 86 | data[self.text] = value = {} 87 | return value 88 | 89 | def set(self, data, value, _): 90 | setitem = getattr(data, '__setitem__', None) 91 | if setitem is None: 92 | return setattr(data, self.text, value) 93 | else: 94 | return setitem(self.text, value) 95 | 96 | def str(self, base): 97 | if base: 98 | return base+'.'+self.text 99 | else: 100 | return self.text 101 | 102 | 103 | class ConvertOp(Op): 104 | 105 | name = 'convert' 106 | 
107 | def __init__(self, callable_): 108 | self.callable = callable_ 109 | 110 | def get(self, data): 111 | return self.callable(data) 112 | 113 | def str(self, base): 114 | callable_str = getattr(self.callable, '__name__', repr(self.callable)) 115 | return '{}({}, {})'.format(self.name, base, callable_str) 116 | 117 | 118 | class RequiredOp(Op): 119 | 120 | name = 'required' 121 | 122 | def get(self, data): 123 | return data 124 | 125 | def not_present(self, data): 126 | raise data 127 | 128 | def str(self, base): 129 | return '{}({})'.format(self.name, base) 130 | 131 | 132 | class IfSuppliedOp(Op): 133 | 134 | name = 'if_supplied' 135 | 136 | def __init__(self, false_values): 137 | self.false_values = false_values 138 | 139 | def get(self, data): 140 | if data in self.false_values: 141 | return NotPresent(data) 142 | return data 143 | 144 | def str(self, base): 145 | return '{}({})'.format(self.name, base) 146 | 147 | 148 | class InsertOp(Op): 149 | 150 | name = 'insert' 151 | 152 | def __init__(self, index): 153 | self.index = index 154 | 155 | def ensure(self, data): 156 | value = {} 157 | data.insert(self.index, value) 158 | return value 159 | 160 | def set(self, data, value, _): 161 | data.insert(self.index, value) 162 | 163 | def str(self, base): 164 | return '{}.{}({!r})'.format(base, self.name, self.index) 165 | 166 | 167 | class AppendOp(Op): 168 | 169 | name = 'append' 170 | 171 | def ensure(self, data): 172 | value = {} 173 | data.append(value) 174 | return value 175 | 176 | def set(self, data, value, _): 177 | data.append(value) 178 | 179 | def str(self, base): 180 | return '{}.{}()'.format(base, self.name) 181 | 182 | 183 | class MergeOp(Op): 184 | 185 | name = 'merge' 186 | 187 | def ensure(self, data): 188 | raise TypeError('merge() must be final operation') 189 | 190 | def set(self, data, value, context): 191 | return context.merge(data, value) 192 | 193 | def str(self, base): 194 | return '{}.{}()'.format(base, self.name) 195 | 196 | 197 | class ValueOp(Op): 198 | 199 | name = 'value' 200 | 201 | def __init__(self, value): 202 | self.value = value 203 | 204 | def get(self, data): 205 | return self.value 206 | 207 | def str(self, base): 208 | return '{}({!r})'.format(self.name, self.value) 209 | 210 | 211 | class Path: 212 | """ 213 | A generative object used for constructing source or target mappings. 214 | See :doc:`mapping` for details. 215 | """ 216 | 217 | def __init__(self, name, *ops): 218 | self.name = name 219 | self.ops = ops 220 | 221 | def _extend(self, op): 222 | return type(self)(self.name, *(self.ops + (op,))) 223 | 224 | def __getitem__(self, name): 225 | """ 226 | Indicate that the source or target should be traversed by item access. 227 | """ 228 | return self._extend(ItemOp(name)) 229 | 230 | def __getattr__(self, name): 231 | """ 232 | Indicate that the source or target should be traversed by attribute access. 233 | """ 234 | return self._extend(AttrOp(name)) 235 | 236 | def insert(self, index): 237 | """ 238 | Indicate that a target should be mapped by inserting at the specified 239 | index. 240 | """ 241 | return self._extend(InsertOp(index)) 242 | 243 | def append(self): 244 | """ 245 | Indicate that a target should be mapped by appending. 246 | """ 247 | return self._extend(AppendOp()) 248 | 249 | def merge(self): 250 | """ 251 | Indicate that a target should be mapped by merging. 
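        An illustrative sketch (the ``database`` and ``creds`` names are invented
        for the example, not part of the API): used as the target side of a mapping
        passed to ``Config.merge``, this merges the source value into the existing
        section rather than replacing it::

            from configurator import Config, target

            config = Config({'database': {'user': 'foo'}})
            config.merge(
                {'creds': {'password': 'secret'}},
                {'creds': target['database'].merge()},
            )
            assert config.database.user == 'foo'
            assert config.database.password == 'secret'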
252 | """ 253 | return self._extend(MergeOp()) 254 | 255 | def __str__(self): 256 | str = self.name 257 | for op in self.ops: 258 | str = op.str(str) 259 | return str 260 | 261 | def __repr__(self): 262 | return 'Path:{}'.format(str(self)) 263 | 264 | 265 | def parse_text(segment): 266 | if isinstance(segment, str): 267 | segment = Path('', *(TextOp(part) for part in segment.split('.'))) 268 | elif not isinstance(segment, Path): 269 | segment = Path('', ItemOp(segment)) 270 | return segment 271 | -------------------------------------------------------------------------------- /configurator/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | from copy import deepcopy 3 | from io import open, StringIO 4 | from os.path import exists, expanduser 5 | 6 | from .mapping import load, store, target, convert, if_supplied 7 | from .merge import MergeContext 8 | from .node import ConfigNode 9 | from .parsers import Parsers 10 | from .path import parse_text 11 | 12 | 13 | class Config(ConfigNode): 14 | """ 15 | The root of the configuration store. 16 | """ 17 | 18 | __slots__ = ConfigNode.__slots__+('_previous',) 19 | 20 | parsers = Parsers() 21 | 22 | def __init__(self, data=None): 23 | super(Config, self).__init__(data) 24 | self._previous = [] 25 | 26 | @classmethod 27 | def from_text(cls, text, parser, encoding='ascii'): 28 | """ 29 | Construct a :class:`Config` from the provided ``text`` using the specified 30 | :ref:`parser `. If ``text`` is provided as :class:`bytes`, then 31 | the ``encoding`` specified will be used to decode it. 32 | """ 33 | if isinstance(text, bytes): 34 | text = text.decode(encoding) 35 | return cls.from_stream(StringIO(text), parser) 36 | 37 | @classmethod 38 | def from_stream(cls, stream, parser=None): 39 | """ 40 | Construct a :class:`Config` from a stream such as a :func:`file `. 41 | If the stream does not have a ``name`` attribute from which the correct 42 | parser can be guessed, :ref:`parser ` must be specified. 43 | If ``text`` is provided as :class:`bytes`, then the ``encoding`` 44 | specified will be used to decode it. 45 | """ 46 | if parser is None: 47 | name = getattr(stream, 'name', None) 48 | if name is not None: 49 | try: 50 | _, parser = name.rsplit('.', 1) 51 | except ValueError: 52 | pass 53 | if not callable(parser): 54 | parser = cls.parsers[parser] 55 | return cls(parser(stream)) 56 | 57 | @classmethod 58 | def from_path(cls, path, parser=None, encoding=None, optional=False): 59 | """ 60 | Construct a :class:`Config` from file specified as either a string path or a 61 | :class:`pathlib.Path`. 62 | This path with have ``~`` expanded. If specified, ``encoding`` will be used to 63 | decode the content of the file. An explicit :ref:`parser ` 64 | can be specified, if necessary, but the correct one will usually be 65 | guessed from the file extension. 66 | 67 | If ``optional`` is ``True``, then an empty :class:`Config` will be 68 | returned if the file does not exist. 69 | """ 70 | full_path = expanduser(path) 71 | if optional and not exists(full_path): 72 | return cls() 73 | with open(full_path, encoding=encoding) as stream: 74 | return cls.from_stream(stream, parser) 75 | 76 | @classmethod 77 | def from_env(cls, prefix, types=None): 78 | """ 79 | Construct a :class:`Config` from :data:`os.environ` entries 80 | that matches the specified ``prefix``. 
81 | ``prefix`` can either be a simple string prefix 82 | or a :class:`dict` mapping string prefixes to the 83 | :doc:`target paths ` at which they will be stored. 84 | ``types`` is an optional :class:`dict` mapping string suffixes 85 | to callables used to convert matching environment values to the 86 | correct type. 87 | """ 88 | if not isinstance(prefix, dict): 89 | prefixes = {prefix: target} 90 | else: 91 | prefixes = prefix 92 | mapping = {} 93 | for key, value in os.environ.items(): 94 | for prefix, prefix_target in prefixes.items(): 95 | if key.startswith(prefix): 96 | prefix_source = if_supplied(key) 97 | prefix_target = parse_text(prefix_target)[key[len(prefix):].lower()] 98 | if types is not None: 99 | for suffix, type_ in types.items(): 100 | if key.endswith(suffix): 101 | prefix_source = convert(prefix_source, type_) 102 | mapping[prefix_source] = prefix_target 103 | 104 | config = cls() 105 | config.merge(os.environ, mapping) 106 | return config 107 | 108 | def merge(self, source=None, mapping=None, mergers=None): 109 | """ 110 | Modify this :class:`Config` by merging the provided ``source`` into 111 | it using any ``mapping`` or ``mergers`` provided. 112 | 113 | See :doc:`mapping` for more detail. 114 | """ 115 | if isinstance(source, ConfigNode): 116 | source = source.data 117 | context = MergeContext(mergers) 118 | if mapping is None: 119 | self.data = context.merge(source, self.data) 120 | else: 121 | for source_path, target_path in mapping.items(): 122 | value = load(source, source_path) 123 | self.data = store(self.data, target_path, value, context) 124 | 125 | def clone(self): 126 | """ 127 | Clone this :class:`Config` creating copies of all mutable objects 128 | it contains. 129 | """ 130 | return Config(deepcopy(self.data)) 131 | 132 | def __add__(self, other): 133 | """ 134 | Configuration stores can be added together. 135 | This will result in a new :class:`Config` object being returned that 136 | is created by merging the two original configs. 137 | """ 138 | result = self.clone() 139 | result.merge(other) 140 | return result 141 | 142 | def push(self, config=None, empty=False): 143 | """ 144 | Push the provided ``config`` onto this instance, replacing the data 145 | of this :class:`Config`. 146 | 147 | If ``empty`` is ``False``, this is done by merging the existing 148 | contents with the provided ``config``, giving precedence to the 149 | ``config`` passed in. 150 | 151 | If ``empty`` is ``True``, then the provided ``config`` is used to 152 | entirely replace the data of this :class:`Config`. 153 | 154 | ``config`` may either be a :class:`Config` instance or anything 155 | that would be passed to the :class:`Config` constructor. 156 | 157 | This method returns a context manager that, when its context is left, 158 | restores the configuration data used to whatever was in place 159 | before :meth:`push` was called, regardless of any further :meth:`push` 160 | or :meth:`merge` calls, or other modifications to this :class:`Config` object. 161 | """ 162 | if empty: 163 | base = Config() 164 | else: 165 | base = self.clone() 166 | if not isinstance(config, Config): 167 | config = Config(config) 168 | self._previous.append(self.data) 169 | context = PushContext(self, self.data) 170 | self.data = (base + config).data 171 | return context 172 | 173 | def pop(self): 174 | """ 175 | Pop off the top-most data that was last :meth:`pushed ` 176 | on to this :class:`Config`. 
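        A minimal illustrative sketch (the ``threads`` key is invented for the
        example): pushing temporary data and then popping restores whatever
        was in place before::

            config = Config({'threads': 1})
            config.push({'threads': 2})
            assert config.threads == 2
            config.pop()
            assert config.threads == 1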
177 | """ 178 | self.data = self._previous.pop() 179 | 180 | 181 | class PushContext: 182 | 183 | def __init__(self, config, data): 184 | self.config = config 185 | self.data = data 186 | 187 | def __enter__(self): 188 | return self.config 189 | 190 | def __exit__(self, exc_type, exc_val, exc_tb): 191 | while self.config.data is not self.data: 192 | self.config.pop() 193 | -------------------------------------------------------------------------------- /configurator/node.py: -------------------------------------------------------------------------------- 1 | from pprint import pformat 2 | 3 | from .path import parse_text, NotPresent 4 | 5 | 6 | class ConfigNode: 7 | """ 8 | A node in the configuration store. 9 | These are obtained by using the methods below on :class:`~configurator.Config` 10 | objects or any :class:`ConfigNode` objects returned by them. 11 | """ 12 | 13 | __slots__ = ('_container', '_accessor', 'data') 14 | 15 | def __init__(self, data=None, container=None, accessor=None): 16 | if data is None: 17 | data = {} 18 | 19 | #: The underlying python object for this node, often a :class:`dict` 20 | #: or a :class:`list`. 21 | #: 22 | #: .. warning:: 23 | #: 24 | #: :attr:`data` may be read but should not be modified as problems will occur 25 | #: if the :class:`~configurator.node.ConfigNode` hierarchy and :attr:`data` 26 | #: hierarchy become out of sync. 27 | self.data = data 28 | self._container = container 29 | self._accessor = accessor 30 | 31 | def _wrap(self, accessor, value): 32 | if isinstance(value, (dict, list)): 33 | value = ConfigNode(value, self.data, accessor) 34 | return value 35 | 36 | def _get(self, name): 37 | try: 38 | value = self.data[name] 39 | except TypeError: 40 | raise KeyError(name) 41 | return self._wrap(name, value) 42 | 43 | def __getattr__(self, name): 44 | """ 45 | Obtain a child of this node by attribute access. If the child 46 | is a :class:`dict` or :class:`list`, a :class:`ConfigNode` for it will 47 | be returned, otherwise the value itself will be returned. 48 | """ 49 | try: 50 | return self._get(name) 51 | except KeyError: 52 | raise AttributeError(name) 53 | 54 | def __setattr__(self, name, value): 55 | """ 56 | Set a child of this node. This is a convenience helper that 57 | calls :meth:`__setitem__` and can be used when ``name`` is a string. 58 | """ 59 | if name in self.__slots__: 60 | super(ConfigNode, self).__setattr__(name, value) 61 | else: 62 | self[name] = value 63 | 64 | def __delattr__(self, name): 65 | """ 66 | Remove a child of this node by attribute access. This is a convenience 67 | helper that calls :meth:`__delitem__` and can be used when ``name`` is 68 | a string. 69 | """ 70 | try: 71 | del self[name] 72 | except KeyError: 73 | raise AttributeError(name) 74 | 75 | def __getitem__(self, name): 76 | """ 77 | Obtain a child of this node by item access. If the child 78 | is a :class:`dict` or :class:`list`, a :class:`ConfigNode` for it will 79 | be returned, otherwise the value itself will be returned. 80 | """ 81 | return self._get(name) 82 | 83 | def __setitem__(self, name, value): 84 | """ 85 | Set the ``value`` for the supplied ``name`` in :attr:`data`. 86 | If :attr:`data` is a :class:`dict`, then ``name`` must be a :class:`str`. 87 | If :attr:`data` is a :class:`list`, then ``name`` must be an :class:`int`. 88 | """ 89 | self.data[name] = value 90 | 91 | def __delitem__(self, name): 92 | """ 93 | Remove the supplied ``name`` in :attr:`data`. 94 | If :attr:`data` is a :class:`dict`, then ``name`` must be a :class:`str`. 
95 | If :attr:`data` is a :class:`list`, then ``name`` must be an :class:`int`. 96 | """ 97 | del self.data[name] 98 | 99 | def get(self, name=None, default=None): 100 | """ 101 | Obtain a child of this node by access like :meth:`dict.get`. If the child 102 | is a :class:`dict` or :class:`list`, a :class:`ConfigNode` for it will 103 | be returned, otherwise the value itself will be returned. 104 | 105 | If ``name`` is not specified, the :attr:`data` for this node will be returned. 106 | This can be helpful when using :meth:`node` to return :class:`ConfigNode` objects 107 | for simple values. 108 | """ 109 | if name is None: 110 | return self.data 111 | try: 112 | return self._get(name) 113 | except KeyError: 114 | return default 115 | 116 | def items(self): 117 | """ 118 | Obtain children of this node by access like :meth:`dict.items`. If the child 119 | is a :class:`dict` or :class:`list`, a :class:`ConfigNode` for it will 120 | be returned, otherwise the value itself will be returned. 121 | """ 122 | for key, value in self.data.items(): 123 | yield key, self._wrap(key, value) 124 | 125 | def __iter__(self): 126 | """ 127 | Obtain children of this node by either :class:`dict` or :class:`list` 128 | iteration, depending on the type of the underlying :attr:`data`. 129 | If the child is a :class:`dict` or :class:`list`, a :class:`ConfigNode` 130 | for it will be returned, otherwise the value itself will be returned. 131 | """ 132 | for index, item in enumerate(self.data): 133 | yield self._wrap(index, item) 134 | 135 | def node(self, path=None, create=False): 136 | """ 137 | Obtain a child of this node using a dotted path or 138 | :class:`~configurator.path.Path` such as one generated from 139 | :attr:`~configurator.source`. ``path`` may also be a simple string or integer, in 140 | which case it will be used to obtain a child of this node using item access. 141 | 142 | This always returns a :class:`ConfigNode` or raises an exception if the path 143 | cannot be resolved. This allows you to use :meth:`set` even for values in dictionaries 144 | or items in lists. 145 | 146 | If ``create`` is ``True``, all nodes along the path will be created as dictionaries 147 | if they do not exist. 148 | """ 149 | if path is None: 150 | return self 151 | 152 | path = parse_text(path) 153 | if not path.ops: 154 | return self 155 | 156 | if create: 157 | action = 'ensure' 158 | else: 159 | action = 'get' 160 | 161 | data = self.data 162 | for op in path.ops: 163 | if isinstance(data, NotPresent): 164 | op.not_present(data) 165 | else: 166 | container = data 167 | data = getattr(op, action)(container) 168 | 169 | if isinstance(data, NotPresent): 170 | raise data 171 | 172 | text = getattr(op, 'text', None) 173 | if text is None: 174 | raise TypeError('invalid path: '+str(path)) 175 | 176 | return ConfigNode(data, container, op.text) 177 | 178 | def set(self, value): 179 | """ 180 | Replace the :attr:`data` of this node with the supplied ``value``. 
181 | """ 182 | if self._container is None: 183 | self.data = value 184 | else: 185 | self._container[self._accessor] = value 186 | 187 | def __repr__(self): 188 | cls = type(self) 189 | pretty = pformat(self.data, width=70) 190 | if '\n' in pretty: 191 | pretty = '\n'+pretty+'\n' 192 | return '{}.{}({})'.format( 193 | cls.__module__, 194 | cls.__name__, 195 | pretty 196 | ) 197 | 198 | def __getstate__(self): 199 | return {name: getattr(self, name) for name in self.__slots__} 200 | 201 | def __setstate__(self, data): 202 | for name, value in data.items(): 203 | setattr(self, name, value) 204 | -------------------------------------------------------------------------------- /docs/mapping.rst: -------------------------------------------------------------------------------- 1 | Mapping and Merging 2 | =================== 3 | 4 | .. py:currentmodule:: configurator 5 | 6 | .. invisible-code-block: python 7 | 8 | from configurator import Config 9 | from testfixtures.mock import Mock 10 | 11 | Configurator provides flexible tools for combining configuration information from 12 | multiple sources. These are split into two approaches as described below. 13 | 14 | Mapping 15 | ------- 16 | 17 | This is the process of extracting parts of a configuration source and mapping 18 | them into locations in a target :class:`Config`. 19 | 20 | Dotted paths 21 | ~~~~~~~~~~~~ 22 | 23 | The most common way of doing this is to use dotted paths for both the source and 24 | target of the mappings. These instruct the mapping machinery to traverse by either 25 | item or attribute access, as appropriate, to deal with the mapping of deeply nested 26 | source arguments to deeply nested target attributes. 27 | 28 | For example, suppose we wanted to map these attributes: 29 | 30 | >>> mock = Mock() 31 | >>> mock.foo.bar.bob = 42 32 | >>> mock.baz = {'key': 'value'} 33 | >>> del mock.default 34 | 35 | To get them into the following config: 36 | 37 | >>> config = Config({'cache_size': 13, 'database': {'username': 'test'}}) 38 | 39 | We could map them in as follows: 40 | 41 | >>> config.merge(mock, { 42 | ... 'foo.bar.bob': 'cache_size', 43 | ... 'baz.key': 'keys.baz', 44 | ... 'default.password': 'database.password', 45 | ... }) 46 | >>> config 47 | configurator.config.Config( 48 | {'cache_size': 42, 49 | 'database': {'username': 'test'}, 50 | 'keys': {'baz': 'value'}} 51 | ) 52 | 53 | As the above example shows: 54 | 55 | - If a target container in a traversal path doesn't exist, it will be created as a 56 | dictionary. 57 | - If any element of a source path does not exist, then the target side of the mapping is 58 | not performed. If you'd expect an exception to be raised here, see the "Operations" 59 | section below. 60 | 61 | Generative paths 62 | ~~~~~~~~~~~~~~~~ 63 | 64 | If you require even more fine grained control of the mapping process, generative paths can 65 | be used instead of dotted paths. For the example from above, the generative equivalent 66 | would be: 67 | 68 | >>> from configurator import Config, source, target 69 | >>> config = Config({'cache_size': 13, 'database': {'username': 'test'}}) 70 | >>> config.merge(mock, { 71 | ... source.foo.bar.bob: target['cache_size'], 72 | ... source.baz['key']: target['keys']['baz'], 73 | ... source.default.password: target['database']['password'], 74 | ... 
}) 75 | >>> config 76 | configurator.config.Config( 77 | {'cache_size': 42, 78 | 'database': {'username': 'test'}, 79 | 'keys': {'baz': 'value'}} 80 | ) 81 | 82 | As the above example shows, this approach is more verbose and explicit; it becomes 83 | necessary when you need to perform more specific configuration manipulation. 84 | 85 | For example, suppose we had this configuration: 86 | 87 | >>> config = Config({'actions': ['b', 'c']}) 88 | 89 | Now we want to merge in this set of actions, with the first inserted at the start of the 90 | list and the last appended to the end, creating the final sequence: 91 | 92 | >>> extra = Config(['a', 'd']) 93 | 94 | Using generative paths, we could do this: 95 | 96 | >>> config.merge(source=extra, mapping={ 97 | ... source[0]: target['actions'].insert(0), 98 | ... source[-1]: target['actions'].append(), 99 | ... }) 100 | >>> config 101 | configurator.config.Config({'actions': ['a', 'b', 'c', 'd']}) 102 | 103 | Generative paths also provide the ability to merge subsections of a config: 104 | 105 | >>> config1 = Config({'foo': {'bar': 'baz'}}) 106 | >>> config2 = Config({'alpha': 'beta'}) 107 | >>> config2.merge(config1, mapping={'foo': target.merge()}) 108 | >>> config2 109 | configurator.config.Config({'alpha': 'beta', 'bar': 'baz'}) 110 | 111 | As you can see, dotted and generative paths can also be used interchangeably. 112 | Generative merging can also be used to merge one config into a section within another: 113 | 114 | >>> config1 = Config({'foo': 'bar'}) 115 | >>> config2 = Config({'alpha': {'beta': 'gamma'}}) 116 | >>> config2.merge(config1, mapping={source: target['alpha'].merge()}) 117 | >>> config2 118 | configurator.config.Config({'alpha': {'beta': 'gamma', 'foo': 'bar'}}) 119 | 120 | .. note:: 121 | 122 | When using attribute access in a generative path, this means *only* attribute access: 123 | 124 | >>> config1 = Config({'foo': 'bar'}) 125 | >>> config2 = Config({'alpha': {'beta': 'gamma'}}) 126 | >>> config2.merge(config1, mapping={source: target.alpha.merge()}) 127 | Traceback (most recent call last): 128 | ... 129 | AttributeError: 'dict' object has no attribute 'alpha' 130 | 131 | This may result in exceptions being raised when they're used on the target side of a 132 | mapping, or the source side being treated as not present. 133 | 134 | For this reason, it's better to stick to dotted paths unless you need the specific 135 | behaviour offered by generative mapping. 136 | 137 | Generative paths can also be used to provide literal values on the source: 138 | 139 | >>> from configurator import Config, value 140 | >>> config = Config() 141 | >>> config.merge(mapping={value(42): 'version.minor'}) 142 | >>> config 143 | configurator.config.Config({'version': {'minor': 42}}) 144 | 145 | Operations 146 | ~~~~~~~~~~ 147 | 148 | Some behaviour is better expressed as a function operating on a mapping path. 149 | 150 | required 151 | ^^^^^^^^ 152 | 153 | The default handling of mappings where the source-side is not present is to do nothing, 154 | rather than raising an exception: 155 | 156 | >>> Config().merge(source={}, mapping={'foo.bar': 'baz'}) 157 | 158 | If you need to raise an exception when a source mapping is missing, you can use the 159 | :func:`required` operation: 160 | 161 | >>> from configurator import required 162 | >>> Config().merge(source={}, mapping={required('foo.bar'): 'baz'}) 163 | Traceback (most recent call last): 164 |
165 | configurator.path.NotPresent: foo 166 | 167 | convert 168 | ^^^^^^^ 169 | 170 | By default, configurator expects data to be of the correct type, with conversion 171 | normally being handled by the parser. Some mapping sources, however, may provide 172 | strings where numbers or booleans are wanted. The :func:`convert` operation can be 173 | used to deal with this: 174 | 175 | >>> from configurator import convert 176 | >>> config = Config() 177 | >>> config.merge(source={'MY_ENV_VAR': '2'}, mapping={convert('MY_ENV_VAR', int): 'foo'}) 178 | >>> config 179 | configurator.config.Config({'foo': 2}) 180 | 181 | if_supplied 182 | ^^^^^^^^^^^ 183 | 184 | Some configuration sources provide defaults such as ``None`` or empty strings that are unhelpful 185 | when mapping into a :class:`Config`. In these cases, the mapping can be configured to treat values 186 | as not present if they match Python's definition of "false" by using the :func:`if_supplied` 187 | operation: 188 | 189 | >>> from argparse import Namespace 190 | >>> from configurator import if_supplied 191 | >>> config = Config() 192 | >>> config.merge( 193 | ... source=Namespace(my_option=None), 194 | ... mapping={if_supplied('my_option'): 'some_key'} 195 | ... ) 196 | >>> config 197 | configurator.config.Config({}) 198 | 199 | Merging 200 | -------- 201 | 202 | This is the process of combining two :class:`Config` objects. 203 | By default, this involves unioning dictionaries and concatenating lists: 204 | 205 | >>> config1 = Config({'dict': {'a': 1, 'b': 2}, 'list': ['a']}) 206 | >>> config2 = Config({'dict': {'b': 3, 'c': 4}, 'list': ['b']}) 207 | >>> config1 + config2 208 | configurator.config.Config({'dict': {'a': 1, 'b': 3, 'c': 4}, 'list': ['a', 'b']}) 209 | 210 | Merging is performed using a configurable mapping of python types to merge functions. 211 | This can be augmented or completely replaced by using the :meth:`~Config.merge` method. 212 | 213 | For example, if we wished to support :class:`tuple` merging by concatenation, we could 214 | re-use the merge function for lists: 215 | 216 | >>> from configurator.merge import default_mergers, merge_list 217 | >>> config1 = Config(('a', 'b')) 218 | >>> config2 = Config(('c', 'd')) 219 | >>> config1.merge(config2, mergers=default_mergers+{tuple: merge_list}) 220 | >>> config1 221 | configurator.config.Config(('a', 'b', 'c', 'd')) 222 | 223 | The :attr:`default_mergers` mapping supports addition to make it easy to add extra 224 | merge functions to the existing ones. If, instead, you want to completely replace 225 | the mapping, you can use a normal :class:`dict`: 226 | 227 | >>> config1 = Config({'tuple': ('a', 'b')}) 228 | >>> config2 = Config({'tuple': ('c', 'd')}) 229 | >>> config1.merge(config2, mergers={tuple: merge_list}) 230 | Traceback (most recent call last): 231 | ... 232 | TypeError: Cannot merge <class 'dict'> with <class 'dict'> 233 | 234 | As you can see, this does mean that any merging that isn't catered for will result in a 235 | :class:`TypeError` being raised. 236 | 237 | When writing a merge function, the ``context`` parameter is there so that merging of 238 | complex data types can be handed off to whatever merge function is most appropriate. 239 | This is only likely to be needed when merging mappings, and that 240 | has already been implemented, but should you need to do this, please consult the 241 | source code for :func:`configurator.merge.merge_dict`.
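For completeness, here is a sketch of what a merge function for :class:`set` values could look like; it is not something shipped with the package and, since sets cannot contain further nested containers, it simply ignores ``context``:

.. code-block:: python

    from configurator import Config
    from configurator.merge import default_mergers

    def merge_set(context, source, target):
        # return a new set combining both sides; the returned object
        # replaces the existing target value
        return target | source

    config1 = Config({'suffixes': {'.yml', '.yaml'}})
    config2 = Config({'suffixes': {'.json'}})
    config1.merge(config2, mergers=default_mergers + {set: merge_set})

After this merge, ``config1.suffixes`` would contain all three suffixes.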
242 | -------------------------------------------------------------------------------- /tests/test_node.py: -------------------------------------------------------------------------------- 1 | from textwrap import dedent 2 | 3 | from testfixtures import compare, ShouldRaise 4 | 5 | from configurator import source, convert 6 | from configurator.node import ConfigNode 7 | from configurator.path import NotPresent 8 | 9 | 10 | class TestInstantiation: 11 | 12 | def test_empty(self): 13 | config = ConfigNode() 14 | compare(config.data, expected={}) 15 | 16 | def test_dict(self): 17 | config = ConfigNode(dict(x=1)) 18 | compare(config['x'], expected=1) 19 | 20 | def test_list(self): 21 | config = ConfigNode([1, 2]) 22 | compare(config[0], expected=1) 23 | compare(config[1], expected=2) 24 | compare(list(config), expected=[1, 2]) 25 | 26 | def test_int(self): 27 | # not very useful, but documents .data as a public api 28 | config = ConfigNode(1) 29 | compare(config.data, 1) 30 | 31 | 32 | class TestItemAccess: 33 | 34 | def test_there_dict(self): 35 | config = ConfigNode({'foo': 1}) 36 | compare(config['foo'], expected=1) 37 | 38 | def test_there_list(self): 39 | config = ConfigNode([1]) 40 | compare(config[0], expected=1) 41 | 42 | def test_not_there_dict(self): 43 | config = ConfigNode({}) 44 | with ShouldRaise(KeyError('foo')): 45 | config['foo'] 46 | 47 | def test_not_there_list(self): 48 | config = ConfigNode([]) 49 | with ShouldRaise(IndexError('list index out of range')): 50 | config[0] 51 | 52 | def test_not_there_simple(self): 53 | config = ConfigNode(1) 54 | with ShouldRaise(KeyError('foo')): 55 | config['foo'] 56 | 57 | def test_get(self): 58 | config = ConfigNode({'foo': 1}) 59 | compare(config.get('foo'), expected=1) 60 | 61 | def test_get_default(self): 62 | config = ConfigNode({}) 63 | compare(config.get('foo', 1), expected=1) 64 | 65 | def test_get_default_default(self): 66 | config = ConfigNode({}) 67 | compare(config.get('foo'), expected=None) 68 | 69 | def test_get_list_number_key(self): 70 | config = ConfigNode([1]) 71 | compare(config.get(0), expected=1) 72 | 73 | def test_get_list_string_key(self): 74 | config = ConfigNode([1]) 75 | compare(config.get('foo'), expected=None) 76 | 77 | def test_simple_value(self): 78 | config = ConfigNode({}) 79 | compare(config.get('foo', 1), expected=1) 80 | 81 | def test_dict_value(self): 82 | config = ConfigNode({'foo': {'bar': 1}}) 83 | obj = config['foo'] 84 | assert isinstance(obj, ConfigNode) 85 | compare(obj.data, expected={'bar': 1}) 86 | 87 | def test_list_value(self): 88 | config = ConfigNode({'foo': [1]}) 89 | obj = config['foo'] 90 | assert isinstance(obj, ConfigNode) 91 | compare(obj.data, expected=[1]) 92 | 93 | def test_get_wrapped_value(self): 94 | config = ConfigNode({'foo': {'bar': 1}}) 95 | obj = config['foo'] 96 | assert isinstance(obj, ConfigNode) 97 | compare(obj.data, expected={'bar': 1}) 98 | 99 | def test_items_dict(self): 100 | config = ConfigNode({'foo': 1, 'bar': 2}) 101 | compare(config.items(), expected=[('foo', 1), ('bar', 2)]) 102 | 103 | def test_items_list(self): 104 | config = ConfigNode([]) 105 | expected = "'list' object has no attribute 'items'" 106 | with ShouldRaise(AttributeError(expected)): 107 | tuple(config.items()) 108 | 109 | def test_items_wrapped_value(self): 110 | config = ConfigNode({'foo': [1]}) 111 | obj = tuple(config.items())[0][1] 112 | assert isinstance(obj, ConfigNode) 113 | compare(obj.data, expected=[1]) 114 | 115 | def test_set_item_dict(self): 116 | config = ConfigNode() 117 | 
config['foo'] = 1 118 | compare(config.data, expected={'foo': 1}) 119 | 120 | def test_set_item_list(self): 121 | config = ConfigNode(['old']) 122 | config[0] = 'new' 123 | compare(config.data, expected=['new']) 124 | 125 | def test_remove_dict(self): 126 | config = ConfigNode({'foo': 1}) 127 | del config['foo'] 128 | compare(config.data, expected={}) 129 | 130 | def test_remove_not_there_dict(self): 131 | config = ConfigNode({'foo': 1}) 132 | with ShouldRaise(KeyError('bar')): 133 | del config['bar'] 134 | 135 | def test_remove_list(self): 136 | config = ConfigNode(["x", "y"]) 137 | del config[0] 138 | compare(config.data, expected=["y"]) 139 | 140 | def test_remove_not_there_list(self): 141 | config = ConfigNode(["x", "y"]) 142 | with ShouldRaise(IndexError('list assignment index out of range')): 143 | del config[2] 144 | 145 | 146 | class TestAttributeAccess: 147 | 148 | def test_there(self): 149 | config = ConfigNode({'foo': 1}) 150 | compare(config.foo, expected=1) 151 | 152 | def test_not_there(self): 153 | config = ConfigNode({}) 154 | with ShouldRaise(AttributeError('foo')): 155 | config.foo 156 | 157 | def test_simple_value(self): 158 | config = ConfigNode({'foo': 1}) 159 | compare(config.foo, expected=1) 160 | 161 | def test_dict_value(self): 162 | config = ConfigNode({'foo': {'bar': 1}}) 163 | obj = config.foo 164 | assert isinstance(obj, ConfigNode) 165 | compare(obj.data, expected={'bar': 1}) 166 | 167 | def test_list_value(self): 168 | config = ConfigNode({'foo': [1]}) 169 | obj = config.foo 170 | assert isinstance(obj, ConfigNode) 171 | compare(obj.data, expected=[1]) 172 | 173 | def test_name_clash_with_real_attrs(self): 174 | # method, data, __method 175 | config = ConfigNode({'get': 1}) 176 | compare(config.get('get'), expected=1) 177 | 178 | def test_list_source(self): 179 | config = ConfigNode([1]) 180 | with ShouldRaise(AttributeError('foo')): 181 | config.foo 182 | 183 | def test_simple_source(self): 184 | config = ConfigNode(1) 185 | with ShouldRaise(AttributeError('foo')): 186 | config.foo 187 | 188 | def test_set_attr_dict(self): 189 | config = ConfigNode() 190 | config.foo = 1 191 | compare(config.data, expected={'foo': 1}) 192 | 193 | def test_set_attr_list(self): 194 | config = ConfigNode([]) 195 | with ShouldRaise(TypeError): 196 | config.foo = 1 197 | compare(config.data, expected=[]) 198 | 199 | def test_del_attr(self): 200 | config = ConfigNode({"foo": 1}) 201 | del config.foo 202 | compare(config.data, expected={}) 203 | 204 | def test_del_attr_not_there_dict(self): 205 | config = ConfigNode({"foo": 1}) 206 | with ShouldRaise(AttributeError("bar")): 207 | del config.bar 208 | 209 | 210 | class TestOtherFunctionality: 211 | 212 | def test_iterate_over_list_of_dicts(self): 213 | node = ConfigNode([{'x': 1}]) 214 | compare(tuple(node)[0], expected=ConfigNode({'x': 1})) 215 | 216 | def test_iterate_over_dict(self): 217 | node = ConfigNode({'x': 1, 'y': 2}) 218 | compare(sorted(node), expected=['x', 'y']) 219 | 220 | def test_set_on_root(self): 221 | node = ConfigNode({'x': 1, 'y': 2}) 222 | node.set(['a', 'b']) 223 | compare(node.data, expected=['a', 'b']) 224 | 225 | def test_repr(self): 226 | node = ConfigNode({'some long key': 'some\nvalue', 227 | 'another long key': 2, 228 | 'yet another long key': 3}) 229 | compare(repr(node), expected=dedent("""\ 230 | configurator.node.ConfigNode( 231 | {'another long key': 2, 232 | 'some long key': 'some\\nvalue', 233 | 'yet another long key': 3} 234 | )""")) 235 | 236 | 237 | class TestNodeActions: 238 | 239 | def
test_attr_dict(self): 240 | node = ConfigNode({'a': {'b': 1}}) 241 | compare(node.a.data, expected={'b': 1}, strict=True) 242 | node.a.set('c') 243 | compare(node.data, expected={'a': 'c'}, strict=True) 244 | 245 | def test_item_dict(self): 246 | node = ConfigNode({'a': {'b': 1}}) 247 | compare(node.a.data, expected={'b': 1}, strict=True) 248 | node['a'].set('c') 249 | compare(node.data, expected={'a': 'c'}, strict=True) 250 | 251 | def test_item_list(self): 252 | node = ConfigNode([['a', 'b'], ['c', 'd']]) 253 | compare(node[0].data, expected=['a', 'b'], strict=True) 254 | node[0].set('e') 255 | compare(node.data, expected=['e', ['c', 'd']], strict=True) 256 | 257 | def test_items(self): 258 | node = ConfigNode({'a': [1], 'b': [2]}) 259 | keys = [] 260 | for key, child in node.items(): 261 | keys.append(key) 262 | child.set(child[0]*2) 263 | compare(node.data, expected={'a': 2, 'b': 4}, strict=True) 264 | compare(keys, expected=['a', 'b']) 265 | 266 | def test_iter_list(self): 267 | node = ConfigNode([['a', 'b'], ['c', 'd']]) 268 | for child in node: 269 | child.set(''.join(child.data)) 270 | compare(node.data, expected=['ab', 'cd'], strict=True) 271 | 272 | def test_node_empty(self): 273 | node = ConfigNode() 274 | assert node.node() is node 275 | 276 | def test_node_path_does_nothing(self): 277 | node = ConfigNode() 278 | assert node.node(source) is node 279 | 280 | def test_node_dict_item(self): 281 | node = ConfigNode({'a': 1, 'b': 2}) 282 | child = node.node('a') 283 | compare(child.data, expected=1) 284 | child.set(3) 285 | compare(node.data, expected={'a': 3, 'b': 2}, strict=True) 286 | 287 | def test_node_list_item(self): 288 | node = ConfigNode(['a', 'b']) 289 | child = node.node(0) 290 | compare(child.data, expected='a') 291 | child.set('c') 292 | compare(node.data, expected=['c', 'b'], strict=True) 293 | 294 | def test_node_not_there(self): 295 | node = ConfigNode() 296 | with ShouldRaise(NotPresent('a')): 297 | node.node('a.b.c') 298 | 299 | def test_node_not_there_create(self): 300 | root = ConfigNode() 301 | child = root.node('a.b.c', create=True) 302 | compare(root.data, expected={'a': {'b': {'c': {}}}}) 303 | child.set(1) 304 | compare(root.data, expected={'a': {'b': {'c': 1}}}) 305 | 306 | def test_path_no_text_attribute(self): 307 | node = ConfigNode({'a': {'b': {'c': '1'}}}) 308 | with ShouldRaise(TypeError("invalid path: convert(source['a']['b']['c'], int)")): 309 | node.node(convert(source['a']['b']['c'], int)) 310 | 311 | def test_node_and_get(self): 312 | node = ConfigNode({'a': 1, 'b': 2}) 313 | compare(node.node('a').get(), expected=1) 314 | 315 | def test_node_and_get_nested(self): 316 | node = ConfigNode({'a': {'b': 2}}) 317 | compare(node.node('a').get(), expected={'b': 2}) 318 | -------------------------------------------------------------------------------- /tests/test_mapping.py: -------------------------------------------------------------------------------- 1 | from argparse import Namespace 2 | from testfixtures import compare, ShouldRaise 3 | 4 | from configurator.mapping import source, load, convert, store, target, required, if_supplied, value 5 | from configurator.merge import MergeContext 6 | from configurator.path import NotPresent 7 | 8 | 9 | class TestSource: 10 | 11 | def test_root(self): 12 | data = {'foo'} 13 | compare(load(data, source), expected=data) 14 | 15 | def test_getitem(self): 16 | data = {'foo': 'bar'} 17 | compare(load(data, source['foo']), expected='bar') 18 | 19 | def test_index(self): 20 | data = ['a', 'b'] 21 | compare(load(data, 
source[1]), expected='b') 22 | 23 | def test_attr(self): 24 | data = Namespace(x=1) 25 | compare(load(data, source.x), expected=1) 26 | 27 | def test_required_and_present(self): 28 | data = ['a', 'b'] 29 | compare(load(data, required(source[1])), expected='b') 30 | 31 | def test_getitem_not_present(self): 32 | data = {} 33 | with ShouldRaise(NotPresent('foo')): 34 | load(data, required(source['foo'])) 35 | 36 | def test_getitem_nested_not_present(self): 37 | data = {} 38 | with ShouldRaise(NotPresent('foo')): 39 | load(data, required(source['foo']['bar'])) 40 | 41 | def test_index_not_present(self): 42 | data = ['a'] 43 | with ShouldRaise(NotPresent(1)): 44 | load(data, required(source[1])) 45 | 46 | def test_attr_not_present(self): 47 | data = Namespace() 48 | with ShouldRaise(NotPresent('x')): 49 | load(data, required(source.x)) 50 | 51 | def test_attr_nested_not_present(self): 52 | data = Namespace() 53 | with ShouldRaise(NotPresent('x')): 54 | load(data, required(source.x.y)) 55 | 56 | def test_getitem_not_present_okay(self): 57 | data = {} 58 | compare(load(data, source['foo']), expected=NotPresent('foo')) 59 | 60 | def test_index_not_present_okay(self): 61 | data = ['a'] 62 | compare(load(data, source[1]), expected=NotPresent(1)) 63 | 64 | def test_attr_not_present_okay(self): 65 | data = Namespace() 66 | compare(load(data, source.x), expected=NotPresent('x')) 67 | 68 | def test_nested(self): 69 | data = {'foo': ['a', 'b', Namespace(x=1)]} 70 | compare(load(data, source['foo'][2].x), expected=1) 71 | 72 | def test_nested_missing_okay(self): 73 | data = {'foo': []} 74 | compare(load(data, source['foo'][2].x), 75 | expected=NotPresent(2)) 76 | 77 | def test_string_item(self): 78 | data = {'foo': 'bar'} 79 | compare(load(data, 'foo'), expected='bar') 80 | 81 | def test_string_attr(self): 82 | data = Namespace(foo='bar') 83 | compare(load(data, 'foo'), expected='bar') 84 | 85 | def test_string_item_not_present(self): 86 | data = {} 87 | compare(load(data, 'foo'), expected=NotPresent('foo')) 88 | 89 | def test_string_attr_not_present(self): 90 | data = Namespace() 91 | compare(load(data, 'foo'), expected=NotPresent('foo')) 92 | 93 | def test_string_dotted(self): 94 | data = {'foo': Namespace(x=1)} 95 | compare(load(data, 'foo.x'), expected=1) 96 | 97 | def test_string_item_not_present_required(self): 98 | data = {} 99 | with ShouldRaise(NotPresent('foo')): 100 | load(data, required('foo')) 101 | 102 | def test_convert(self): 103 | data = Namespace(x='1') 104 | compare(load(data, convert(source.x, int)), expected=1) 105 | 106 | def test_convert_string(self): 107 | data = Namespace(x='1') 108 | compare(load(data, convert('x', int)), expected=1) 109 | 110 | def test_convert_not_present(self): 111 | data = {} 112 | compare(load(data, convert('x', int)), expected=NotPresent('x')) 113 | 114 | def test_insert(self): 115 | with ShouldRaise(TypeError('Cannot use insert() in source')): 116 | load(None, source.insert(0)) 117 | 118 | def test_append(self): 119 | with ShouldRaise(TypeError('Cannot use append() in source')): 120 | load(None, source.append()) 121 | 122 | def test_merge(self): 123 | with ShouldRaise(TypeError('Cannot use merge() in source')): 124 | load(None, source.merge()) 125 | 126 | def test_if_supplied_truthy(self): 127 | data = Namespace(x='1') 128 | compare(load(data, if_supplied(source.x)), expected='1') 129 | 130 | def test_if_supplied_false(self): 131 | data = Namespace(x=False) 132 | compare(load(data, if_supplied(source.x)), expected=False) 133 | 134 | def 
test_if_supplied_falsy(self): 135 | data = Namespace(x=None) 136 | compare(load(data, if_supplied(source.x)), expected=NotPresent(None)) 137 | 138 | def test_if_supplied_string(self): 139 | data = Namespace(x='1') 140 | compare(load(data, if_supplied('x')), expected='1') 141 | 142 | def test_if_supplied_empty_string(self): 143 | data = Namespace(x='') 144 | compare(load(data, if_supplied('x')), expected=NotPresent('')) 145 | 146 | def test_if_supplied_empty_list(self): 147 | data = Namespace(x=[]) 148 | compare(load(data, if_supplied('x')), expected=[]) 149 | 150 | def test_if_supplied_custom(self): 151 | data = Namespace(x='Unavailable') 152 | compare(load(data, if_supplied(source.x, false_values={'Unavailable'})), 153 | expected=NotPresent('Unavailable')) 154 | 155 | def test_if_supplied_required_falsy(self): 156 | data = Namespace(x=None) 157 | with ShouldRaise(NotPresent(None)): 158 | load(data, required(if_supplied(source.x))) 159 | 160 | def test_if_supplied_str(self): 161 | compare(str(if_supplied(source.x)), expected='if_supplied(source.x)') 162 | 163 | def test_value(self): 164 | compare(load(None, value(42)), expected=42) 165 | 166 | def test_value_if_supplied_falsy(self): 167 | compare(load({}, if_supplied(value(None))), expected=NotPresent(None)) 168 | 169 | 170 | class TestTarget: 171 | 172 | def test_root(self): 173 | data = {'foo'} 174 | with ShouldRaise(TypeError('Cannot store at root')): 175 | store(data, target, 'foo') 176 | compare(data, expected={'foo'}) 177 | 178 | def test_getitem(self): 179 | data = {'foo': 'bar'} 180 | store(data, target['foo'], 'baz') 181 | compare(data, expected={'foo': 'baz'}) 182 | 183 | def test_getitem_not_present(self): 184 | data = {'foo': 'bar'} 185 | store(data, target['foo'], NotPresent('foo')) 186 | compare(data, expected={'foo': 'bar'}) 187 | 188 | def test_index(self): 189 | data = ['a', 'b'] 190 | store(data, target[1], 'c') 191 | compare(data, expected=['a', 'c']) 192 | 193 | def test_index_not_present(self): 194 | data = ['a', 'b'] 195 | store(data, target[1], NotPresent(1)) 196 | compare(data, expected=['a', 'b']) 197 | 198 | def test_append(self): 199 | data = ['a', 'b'] 200 | store(data, target.append(), 'c') 201 | compare(data, expected=['a', 'b', 'c']) 202 | 203 | def test_append_nested(self): 204 | data = [] 205 | store(data, target.append()['a'], 'b') 206 | compare(data, expected=[{'a': 'b'}]) 207 | 208 | def test_append_not_present(self): 209 | data = [] 210 | store(data, target.append()['a'], NotPresent('foo')) 211 | compare(data, expected=[{}]) 212 | 213 | def test_insert(self): 214 | data = ['a', 'b'] 215 | store(data, target.insert(1), 'c') 216 | compare(data, expected=['a', 'c', 'b']) 217 | 218 | def test_insert_nested(self): 219 | data = [] 220 | store(data, target.insert(0)['a'], 'b') 221 | compare(data, expected=[{'a': 'b'}]) 222 | 223 | def test_insert_not_present(self): 224 | data = [] 225 | store(data, target.insert(0)['a'], NotPresent('foo')) 226 | compare(data, expected=[{}]) 227 | 228 | def test_attr(self): 229 | data = Namespace(x=1) 230 | store(data, target.x, 2) 231 | compare(data.x, 2) 232 | 233 | def test_attr_not_present(self): 234 | data = Namespace(x=1) 235 | store(data, target.x, NotPresent('foo')) 236 | compare(data.x, 1) 237 | 238 | def test_nested(self): 239 | data = {'foo': ['a', 'b', Namespace(x=1)]} 240 | store(data, target['foo'][2].x, 2) 241 | compare(data['foo'][2].x, expected=2) 242 | 243 | def test_string_item(self): 244 | data = {'foo': 'bar'} 245 | store(data, 'foo', 'baz') 246 | 
compare(data, expected={'foo': 'baz'}) 247 | 248 | def test_string_attr(self): 249 | data = Namespace(foo='bar') 250 | store(data, target.x, 2) 251 | compare(data.x, 2) 252 | 253 | def test_string_dotted(self): 254 | data = {'foo': Namespace(x=1)} 255 | store(data, 'foo.x', 2) 256 | compare(data['foo'].x, expected=2) 257 | 258 | def test_create_nested_dicts(self): 259 | # created needed dicts 260 | data = {} 261 | store(data, target['x']['y'], 2) 262 | compare(data, expected={'x': {'y': 2}}) 263 | 264 | def test_create_nested_attrs(self): 265 | # exception 266 | data = {} 267 | with ShouldRaise(AttributeError("'dict' object has no attribute 'x'")): 268 | store(data, target.x.y, 2) 269 | 270 | def test_create_from_dotted_string(self): 271 | data = {} 272 | store(data, 'x.y', 2) 273 | compare(data, expected={'x': {'y': 2}}) 274 | 275 | def test_create_nested_attrs_from_dotted_string(self): 276 | # exception 277 | data = Namespace() 278 | with ShouldRaise(AttributeError( 279 | "'Namespace' object has no attribute 'x'" 280 | )): 281 | store(data, 'x.y', 2) 282 | 283 | def test_set_on_convert(self): 284 | data = '1' 285 | with ShouldRaise(TypeError('Cannot use convert() as target')): 286 | store(data, convert(target, int), 'y') 287 | 288 | def test_ensure_on_convert(self): 289 | data = '1' 290 | with ShouldRaise(TypeError('Cannot use convert() as target')): 291 | store(data, convert(target, int).x, 'y') 292 | 293 | def test_set_on_required(self): 294 | data = '1' 295 | with ShouldRaise(TypeError('Cannot use required() as target')): 296 | store(data, required(target), 'y') 297 | 298 | def test_ensure_on_required(self): 299 | data = '1' 300 | with ShouldRaise(TypeError('Cannot use required() as target')): 301 | store(data, required(target).x, 'y') 302 | 303 | def test_set_on_supplied(self): 304 | data = '1' 305 | with ShouldRaise(TypeError('Cannot use if_supplied() as target')): 306 | store(data, if_supplied(target), 'y') 307 | 308 | def test_ensure_on_supplied(self): 309 | data = '1' 310 | with ShouldRaise(TypeError('Cannot use if_supplied() as target')): 311 | store(data, if_supplied(target).x, 'y') 312 | 313 | def test_merge(self): 314 | data = {'x': 1} 315 | data = store(data, target.merge(), {'y': 2}, MergeContext()) 316 | compare(data, expected={'x': 1, 'y': 2}) 317 | 318 | def test_merge_nested(self): 319 | data = {'x': {'y': 2}} 320 | store(data, target['x'].merge(), {'z': 1}, MergeContext()) 321 | compare(data, expected={'x': {'y': 2, 'z': 1}}) 322 | 323 | def test_merge_not_present(self): 324 | data = {'x': {'y': 2}} 325 | store(data, target['x'].merge(), NotPresent('foo'), MergeContext()) 326 | compare(data, expected={'x': {'y': 2}}) 327 | 328 | def test_ensure_on_merge(self): 329 | data = {} 330 | with ShouldRaise(TypeError('merge() must be final operation')): 331 | store(data, target.merge().x, 'y', MergeContext()) 332 | -------------------------------------------------------------------------------- /docs/patterns.rst: -------------------------------------------------------------------------------- 1 | Patterns of Use 2 | =============== 3 | 4 | The rest of the documentation explains how Configurator works in abstract, while 5 | the sections below provide concrete examples of how it can be used in various 6 | applications. 7 | 8 | .. invisible-code-block: python 9 | 10 | # help pyfakefs out... 
11 | import os 12 | replace('os.environ.HOME', '/home/some_user', strict=False) 13 | replace('configurator.config.exists', os.path.exists) 14 | 15 | Layered config files 16 | -------------------- 17 | 18 | A common pattern is to have a system-wide configuration file, overlaid with 19 | an optional user-specific config file. For example: 20 | 21 | .. topic:: /etc/my_app.yml 22 | :class: write-file 23 | 24 | :: 25 | 26 | data_path: /var/wherever 27 | logging_level: warning 28 | foo_enabled: false 29 | 30 | .. topic:: /home/some_user/.my_app.yml 31 | :class: write-file 32 | 33 | :: 34 | 35 | logging_level: debug 36 | foo_enabled: true 37 | 38 | This could be loaded with a function such as this: 39 | 40 | .. code-block:: python 41 | 42 | from configurator import Config 43 | 44 | def load_config(): 45 | base = Config.from_path('/etc/my_app.yml') 46 | user = Config.from_path('~/.my_app.yml', optional=True) 47 | return base + user 48 | 49 | Using the two example config files would result in this config: 50 | 51 | >>> load_config() 52 | configurator.config.Config( 53 | {'data_path': '/var/wherever', 54 | 'foo_enabled': True, 55 | 'logging_level': 'debug'} 56 | ) 57 | 58 | .. _extends-pattern: 59 | 60 | Config file that extends another config file 61 | -------------------------------------------- 62 | 63 | With this :func:`pattern <configurator.patterns.load_with_extends>`, config files use a 64 | key to explicitly specify another config file that they extend. For example: 65 | 66 | .. topic:: base.yml 67 | :class: write-file 68 | 69 | :: 70 | 71 | data_path: /var/wherever 72 | logging_level: warning 73 | foo_enabled: false 74 | 75 | .. topic:: my_app.yml 76 | :class: write-file 77 | 78 | :: 79 | 80 | extends: base.yml 81 | logging_level: debug 82 | foo_enabled: true 83 | 84 | This could be loaded using :func:`~configurator.patterns.load_with_extends`, imported as follows: 85 | 86 | .. code-block:: python 87 | 88 | from configurator import Config 89 | from configurator.patterns import load_with_extends 90 | 91 | Using the two example config files would result in this config: 92 | 93 | >>> load_with_extends('my_app.yml', key='extends') 94 | configurator.config.Config( 95 | {'data_path': '/var/wherever', 96 | 'foo_enabled': True, 97 | 'logging_level': 'debug'} 98 | ) 99 | 100 | Config files that include other config files 101 | -------------------------------------------- 102 | 103 | Another common pattern is to have an application-wide configuration 104 | file that includes sections of configuration from files to be found 105 | in a particular directory. For example: 106 | 107 | .. topic:: /etc/myapp.yml 108 | :class: write-file 109 | 110 | :: 111 | 112 | logging_level: warning 113 | 114 | .. topic:: /etc/myapp.d/site1.yaml 115 | :class: write-file 116 | 117 | :: 118 | 119 | domain: site1.example.com 120 | root: /var/sites/site1 121 | 122 | .. topic:: /etc/myapp.d/site2.yaml 123 | :class: write-file 124 | 125 | :: 126 | 127 | domain: site2.example.com 128 | root: ~someuser/site2 129 | 130 | This could be loaded with a function such as this: 131 | 132 | ..
code-block:: python 133 | 134 | from configurator import Config, source, target 135 | from glob import glob 136 | 137 | def load_config(): 138 | config = Config({'sites': []}) 139 | config.merge(Config.from_path('/etc/myapp.yml')) 140 | for path in glob('/etc/myapp.d/*.y*ml'): 141 | config.merge(Config.from_path(path), mapping={source: target['sites'].append()}) 142 | return config 143 | 144 | Using the example config files above would result in this config: 145 | 146 | >>> load_config() 147 | configurator.config.Config( 148 | {'logging_level': 'warning', 149 | 'sites': [{'domain': 'site1.example.com', 150 | 'root': '/var/sites/site1'}, 151 | {'domain': 'site2.example.com', 152 | 'root': '~someuser/site2'}]} 153 | ) 154 | 155 | Config file overlaid with environment variables 156 | ----------------------------------------------- 157 | 158 | Environment variables provide a way to inject configuration into an application. 159 | This is often used to override configuration from a file but doesn't easily fit 160 | the schema of a config file. Environment variables are also hindered by the fact that 161 | they are only natively able to have string values. 162 | 163 | .. invisible-code-block: python 164 | 165 | replace('os.environ.MYAPP_ENABLED', 'True', strict=False) 166 | replace('os.environ.MYAPP_THREADS', '13', strict=False) 167 | 168 | The mapping process Configurator offers can help with both of these problems. 169 | For example: 170 | 171 | .. topic:: myapp.yml 172 | :class: write-file 173 | 174 | :: 175 | 176 | enabled: false 177 | threads: 1 178 | 179 | The environment variables below can be mapped into the config file above. 180 | 181 | >>> os.environ.get('MYAPP_ENABLED') 182 | 'True' 183 | >>> os.environ.get('MYAPP_THREADS') 184 | '13' 185 | 186 | This could be done with a function such as this: 187 | 188 | .. code-block:: python 189 | 190 | from configurator import Config, convert 191 | from ast import literal_eval 192 | import os 193 | 194 | def load_config(): 195 | config = Config.from_path('myapp.yml') 196 | config.merge(os.environ, mapping={ 197 | convert('MYAPP_ENABLED', literal_eval): 'enabled', 198 | convert('MYAPP_THREADS', int): 'threads', 199 | }) 200 | return config 201 | 202 | Using the example config files above would result in this config: 203 | 204 | >>> load_config() 205 | configurator.config.Config({'enabled': True, 'threads': 13}) 206 | 207 | Config extracted from many environment variables 208 | ------------------------------------------------ 209 | 210 | If you have configuration that is spread across many environment 211 | variables that share a common naming pattern, the :any:`Config.from_env` 212 | class method can provide a succinct way to extract these. 213 | 214 | .. invisible-code-block: python 215 | 216 | replace('os.environ.MYAPP_POSTGRES_HOST', 'some-host', strict=False) 217 | replace('os.environ.MYAPP_POSTGRES_PORT', '5432', strict=False) 218 | replace('os.environ.MYAPP_REDIS_HOST', 'other-host', strict=False) 219 | replace('os.environ.MYAPP_REDIS_PORT', '6379', strict=False) 220 | 221 | For example, the following environment variables: 222 | 223 | >>> os.environ.get('MYAPP_POSTGRES_HOST') 224 | 'some-host' 225 | >>> os.environ.get('MYAPP_POSTGRES_PORT') 226 | '5432' 227 | >>> os.environ.get('MYAPP_REDIS_HOST') 228 | 'other-host' 229 | >>> os.environ.get('MYAPP_REDIS_PORT') 230 | '6379' 231 | 232 | A function such as the following could be used to load the configuration: 233 | 234 | ..
code-block:: python 235 | 236 | from configurator import Config, convert 237 | from ast import literal_eval 238 | import os 239 | 240 | def load_config(): 241 | return Config.from_env( 242 | prefix={'MYAPP_POSTGRES_': 'postgres', 243 | 'MYAPP_REDIS_': 'redis'}, 244 | types={'_PORT': int} 245 | ) 246 | 247 | Using the example environment above would result in this config: 248 | 249 | >>> load_config() 250 | configurator.config.Config( 251 | {'postgres': {'host': 'some-host', 'port': 5432}, 252 | 'redis': {'host': 'other-host', 'port': 6379}} 253 | ) 254 | 255 | Config file with command line overrides 256 | --------------------------------------- 257 | 258 | Many applications allow you to specify the config file on the command line as well 259 | as options that override some of the file based configuration. 260 | 261 | For example, command line arguments could be parsed by a function such as this: 262 | 263 | .. code-block:: python 264 | 265 | from argparse import ArgumentParser, FileType 266 | 267 | def parse_args(): 268 | parser = ArgumentParser() 269 | parser.add_argument('config', type=FileType('r')) 270 | parser.add_argument('--verbose', action='store_true') 271 | parser.add_argument('--threads', type=int) 272 | return parser.parse_args() 273 | 274 | These arguments can be merged into the config they specify with a function such as this: 275 | 276 | .. code-block:: python 277 | 278 | from configurator import Config, convert, if_supplied 279 | 280 | def verbose_to_level(verbose): 281 | if verbose: 282 | return 'debug' 283 | 284 | def load_config(args): 285 | config = Config.from_stream(args.config) 286 | config.merge(args, mapping={ 287 | convert('verbose', verbose_to_level): 'log_level', 288 | if_supplied('threads'): 'threads', 289 | }) 290 | return config 291 | 292 | So, given these command line arguments: 293 | 294 | .. invisible-code-block: python 295 | 296 | replace('sys.argv', ['myapp.py', 'myapp.yaml', '--verbose']) 297 | import sys 298 | 299 | >>> sys.argv 300 | ['myapp.py', 'myapp.yaml', '--verbose'] 301 | 302 | Along with a config file such as this: 303 | 304 | .. topic:: myapp.yaml 305 | :class: write-file 306 | 307 | :: 308 | 309 | log_level: warning 310 | threads: 1 311 | 312 | The two functions above would produce the following config: 313 | 314 | >>> args = parse_args() 315 | >>> load_config(args) 316 | configurator.config.Config({'log_level': 'debug', 'threads': 1}) 317 | 318 | Application and framework configuration in the same file 319 | -------------------------------------------------------- 320 | 321 | It can make sense for an application and the framework it's built with to make use of the 322 | same config file, particularly when combined with layered config files, as described above. This can 323 | allow all applications on a system to share a basic default config while providing overrides to 324 | that configuration along with their own configuration in an application-specific config file. 325 | 326 | What makes this work is keeping the application and framework configuration in separate top-level 327 | namespaces. For example: 328 | 329 | ..
topic:: myapp.yml 330 | :class: write-file 331 | 332 | :: 333 | 334 | # framework configuration: 335 | logging: 336 | console_level: false 337 | file_level: warning 338 | 339 | # application configuration, contained within one top-level key: 340 | my_app: 341 | enabled: True 342 | threads: 1 343 | 344 | Configuring the framework and application then becomes dispatching the top-level config 345 | sections appropriately: 346 | 347 | .. invisible-code-block: python 348 | 349 | def configure_framework(app, logging): 350 | print('TheFramework running %s(%r)\nlogging: %r>' % (type(app).__name__, vars(app), logging)) 351 | 352 | .. code-block:: python 353 | 354 | from configurator import Config 355 | 356 | class MyApp: 357 | def __init__(self, enabled, threads): 358 | self.enabled, self.threads = enabled, threads 359 | 360 | def build_app(config_path): 361 | config = Config.from_path(config_path) 362 | app_config = config.my_app 363 | app = MyApp(**app_config.data) 364 | del config.my_app 365 | return configure_framework(app, **config.data) 366 | 367 | Combining the above function and configuration file might result in: 368 | 369 | >>> build_app('myapp.yml') 370 | TheFramework running MyApp({'enabled': True, 'threads': 1}) 371 | logging: {'console_level': False, 'file_level': 'warning'}> 372 | 373 | Global configuration object 374 | --------------------------- 375 | 376 | .. py:currentmodule:: configurator 377 | 378 | .. invisible-code-block: python 379 | 380 | from mock import Mock 381 | import pytest 382 | 383 | app = Mock() 384 | app.view.return_value = lambda func: func 385 | app.configurer = lambda func: func 386 | tempdir.write('myapp.yml', b'{"db_url": "..."}') 387 | 388 | For applications where there is no sensible path for passing a configuration 389 | object to the various parts that may need to access it, it can make sense to have a global 390 | :class:`Config` that has configuration pushed on to it at a different time to its creation. 391 | 392 | You may instantiate the :class:`Config` in a module global scope, potentially with 393 | some defaults: 394 | 395 | .. code-block:: python 396 | 397 | from configurator import Config 398 | 399 | config = Config({'default_deny': True}) 400 | 401 | You may then have a web layer that uses the common pattern of decorated functions to 402 | map URLs to the code that renders them, but that also needs access to configuration information: 403 | 404 | .. code-block:: python 405 | 406 | @app.view('/') 407 | def root(request): 408 | db = connect(config.db_url) 409 | if config.default_deny and not db.query(Roles).filter_by(user=request.user): 410 | raise HttpForbidden() 411 | ... 412 | 413 | That same web layer may also have a hook or event that lets you configure the application during 414 | startup: 415 | 416 | .. code-block:: python 417 | 418 | @app.configurer 419 | def configure(): 420 | config.push(Config.from_path('myapp.yml')) 421 | 422 | 423 | .. invisible-code-block: python 424 | 425 | configure() 426 | connect = Mock() 427 | Roles = Mock() 428 | root(Mock()) 429 | 430 | Now, when testing, you can have a fixture that pushes configuration data suitable 431 | for use during automated tests: 432 | 433 | .. code-block:: python 434 | 435 | @pytest.fixture() 436 | def configured(): 437 | with config.push({'db_url': 'postgresql://localhost/test'}): 438 | yield config 439 | 440 | ..
invisible-code-block: python 441 | 442 | assert tuple(configured.__wrapped__())[0] is config 443 | -------------------------------------------------------------------------------- /tests/test_config.py: -------------------------------------------------------------------------------- 1 | from ast import literal_eval 2 | from pathlib import Path 3 | from tempfile import NamedTemporaryFile 4 | 5 | import pickle 6 | import pytest 7 | 8 | from configurator import Config, default_mergers 9 | from io import StringIO 10 | from configurator.parsers import ParseError 11 | from configurator.mapping import source, target, convert, value 12 | from testfixtures import compare, ShouldRaise, TempDirectory, Replace 13 | 14 | 15 | def python_literal(stream): 16 | return literal_eval(stream.read()) 17 | 18 | 19 | class TestInstantiation: 20 | 21 | def test_empty(self): 22 | config = Config() 23 | compare(config.data, expected={}) 24 | 25 | def test_dict(self): 26 | config = Config(dict(x=1)) 27 | compare(config.x, expected=1) 28 | 29 | def test_list(self): 30 | config = Config([1, 2]) 31 | compare(config[0], expected=1) 32 | compare(config[1], expected=2) 33 | compare(list(config), expected=[1, 2]) 34 | 35 | def test_int(self): 36 | # not very useful... 37 | config = Config(1) 38 | compare(config.data, expected=1) 39 | 40 | def test_text_string_parser(self): 41 | config = Config.from_text('{"foo": "bar"}', 'json') 42 | compare(config.data, expected={'foo': 'bar'}) 43 | 44 | def test_bytes_string_parser(self): 45 | config = Config.from_text(b'{"foo": "bar"}', 'json') 46 | compare(config.data, expected={'foo': 'bar'}) 47 | 48 | def test_text_callable_parser(self): 49 | config = Config.from_text("{'foo': 'bar'}", python_literal) 50 | compare(config.data, expected={'foo': 'bar'}) 51 | 52 | def test_text_missing_parser(self): 53 | with ShouldRaise(ParseError("No parser found for 'lolwut'")): 54 | Config.from_text("{'foo': 'bar'}", 'lolwut') 55 | 56 | def test_path_guess_parser(self): 57 | with NamedTemporaryFile(suffix='.json') as source: 58 | source.write(b'{"x": 1}') 59 | source.flush() 60 | config = Config.from_path(source.name) 61 | compare(config.x, expected=1) 62 | 63 | def test_pathlib_guess_parser(self): 64 | with NamedTemporaryFile(suffix='.json') as source: 65 | source.write(b'{"x": 1}') 66 | source.flush() 67 | config = Config.from_path(Path(source.name)) 68 | compare(config.x, expected=1) 69 | 70 | def test_path_guess_parser_no_extension(self): 71 | with TempDirectory() as dir: 72 | path = dir.write('nope', b'{"x": 1}') 73 | with ShouldRaise(ParseError("No parser found for None")): 74 | Config.from_path(path) 75 | 76 | def test_path_guess_parser_bad_extension(self): 77 | with NamedTemporaryFile(suffix='.nope') as source: 78 | with ShouldRaise(ParseError("No parser found for 'nope'")): 79 | Config.from_path(source.name) 80 | 81 | def test_path_explicit_string_parser(self): 82 | with NamedTemporaryFile() as source: 83 | source.write(b'{"x": 1}') 84 | source.flush() 85 | config = Config.from_path(source.name, 'json') 86 | compare(config.x, expected=1) 87 | 88 | def test_path_explicit_callable_parser(self): 89 | with NamedTemporaryFile() as source: 90 | source.write(b'{"x": 1}') 91 | source.flush() 92 | config = Config.from_path(source.name, python_literal) 93 | compare(config.x, expected=1) 94 | 95 | def test_path_with_encoding(self): 96 | with NamedTemporaryFile() as source: 97 | source.write(b'{"x": "\xa3"}') 98 | source.flush() 99 | config = Config.from_path(source.name, 'json', encoding='latin-1') 
100 | compare(config.x, expected=u'\xa3') 101 | 102 | def test_pathlib_with_encoding(self): 103 | with NamedTemporaryFile() as source: 104 | source.write(b'{"x": "\xa3"}') 105 | source.flush() 106 | config = Config.from_path(Path(source.name), 'json', encoding='latin-1') 107 | compare(config.x, expected=u'\xa3') 108 | 109 | def test_stream_with_name_guess_parser(self): 110 | with NamedTemporaryFile(suffix='.json') as source: 111 | source.write(b'{"x": 1}') 112 | source.flush() 113 | source.seek(0) 114 | config = Config.from_stream(source) 115 | compare(config.x, expected=1) 116 | 117 | def test_stream_no_name_no_parser(self): 118 | source = StringIO(u'{"x": 1}') 119 | with ShouldRaise(ParseError("No parser found for None")): 120 | Config.from_stream(source) 121 | 122 | def test_stream_string_parser(self): 123 | source = StringIO(u'{"x": 1}') 124 | config = Config.from_stream(source, 'json') 125 | compare(config.x, expected=1) 126 | 127 | def test_stream_callable_parser(self): 128 | source = StringIO(u'{"x": 1}') 129 | config = Config.from_stream(source, python_literal) 130 | compare(config.x, expected=1) 131 | 132 | @pytest.fixture() 133 | def env(self): 134 | env = {} 135 | with Replace('os.environ', env): 136 | yield env 137 | 138 | def test_from_env_single_prefix(self, env): 139 | env['FOO_BAR'] = 'one' 140 | env['FOO_BAZ'] = 'two' 141 | config = Config.from_env(prefix='FOO_') 142 | compare(config.data, expected={'bar': 'one', 'baz': 'two'}) 143 | 144 | def test_from_env_multiple_prefix(self, env): 145 | env['FOO_BAR'] = 'one' 146 | env['BOB_BAR'] = 'two' 147 | config = Config.from_env({'FOO_': 'foo', 'BOB_': 'bob'}) 148 | compare(config.data, expected={ 149 | 'foo': {'bar': 'one'}, 150 | 'bob': {'bar': 'two'} 151 | }) 152 | 153 | def test_from_env_target_dotted_string(self, env): 154 | env['FOO_BAR'] = 'one' 155 | env['FOO_BAZ'] = 'two' 156 | config = Config.from_env(prefix={'FOO_': 'a.b.c'}) 157 | compare(config.data, expected={ 158 | 'a': {'b': {'c': {'bar': 'one', 'baz': 'two'}}} 159 | }) 160 | 161 | def test_from_env_target_path(self, env): 162 | env['FOO_BAR'] = 'one' 163 | env['FOO_BAZ'] = 'two' 164 | config = Config.from_env(prefix={'FOO_': target['a']}) 165 | compare(config.data, expected={ 166 | 'a': {'bar': 'one', 'baz': 'two'} 167 | }) 168 | 169 | def test_from_env_type_suffix(self, env): 170 | env['FOO_BAR'] = '1' 171 | env['FOO_BAZ'] = '2' 172 | config = Config.from_env(prefix='FOO_', types={'_BAZ': int}) 173 | compare(config.data, expected={'bar': '1', 'baz': 2}) 174 | 175 | def test_from_env_value_is_empty_string(self, env): 176 | env['FOO_BAR'] = '' 177 | env['FOO_BAZ'] = '' 178 | config = Config.from_env(prefix='FOO_', types={'_BAZ': int}) 179 | compare(config.data, expected={}) 180 | 181 | 182 | class TestPushPop: 183 | 184 | def test_push_pop(self): 185 | config = Config({'x': 1, 'y': 2}) 186 | compare(config.x, expected=1) 187 | compare(config.y, expected=2) 188 | config.push(Config({'x': 3})) 189 | compare(config.x, expected=3) 190 | compare(config.y, expected=2) 191 | config.pop() 192 | compare(config.x, expected=1) 193 | compare(config.y, expected=2) 194 | 195 | def test_push_empty(self): 196 | config = Config({'x': 1, 'y': 2}) 197 | compare(config.x, expected=1) 198 | compare(config.y, expected=2) 199 | config.push(Config({'x': 3}), empty=True) 200 | compare(config.x, expected=3) 201 | with ShouldRaise(AttributeError('y')): 202 | config.y 203 | config.pop() 204 | compare(config.x, expected=1) 205 | compare(config.y, expected=2) 206 | 207 | def 
test_push_non_config(self): 208 | config = Config({'x': 1}) 209 | compare(config.x, expected=1) 210 | config.push({'x': 2}) 211 | compare(config.x, expected=2) 212 | 213 | def test_pop_without_push(self): 214 | config = Config({'x': 1, 'y': 2}) 215 | with ShouldRaise(IndexError('pop from empty list')): 216 | config.pop() 217 | 218 | def test_context_manager_push(self): 219 | config = Config({'x': 1, 'y': 2}) 220 | compare(config.x, expected=1) 221 | compare(config.y, expected=2) 222 | with config.push(Config({'x': 3})): 223 | compare(config.x, expected=3) 224 | compare(config.y, expected=2) 225 | compare(config.x, expected=1) 226 | compare(config.y, expected=2) 227 | 228 | def test_context_manager_push_pathological(self): 229 | config = Config({'x': 1, 'y': 2}) 230 | compare(config.x, expected=1) 231 | compare(config.y, expected=2) 232 | with config.push(): 233 | config.data['a'] = 5 234 | config.push({'x': 3}) 235 | config.push({'z': 4}) 236 | compare(config.a, expected=5) 237 | compare(config.x, expected=3) 238 | compare(config.y, expected=2) 239 | compare(config.z, expected=4) 240 | compare(config.x, expected=1) 241 | compare(config.y, expected=2) 242 | with ShouldRaise(AttributeError('a')): 243 | config.a 244 | with ShouldRaise(AttributeError('z')): 245 | config.z 246 | 247 | def test_context_manager_push_deep(self): 248 | config = Config({'x': {'y': 'z'}}) 249 | with config.push(): 250 | config.data['x']['y'] = 'a' 251 | compare(config.x.y, expected='a') 252 | compare(config.x.y, expected='z') 253 | 254 | 255 | class TestNodeBehaviour: 256 | 257 | def test_dict_access(self): 258 | config = Config({'foo': 'bar'}) 259 | compare(config['foo'], expected='bar') 260 | 261 | def test_attr_access(self): 262 | config = Config({'foo': 'bar'}) 263 | compare(config.foo, expected='bar') 264 | 265 | def test_get_on_name_conflict(self): 266 | config = Config({'merge': True}) 267 | assert config.merge is not True 268 | compare(config.get('merge'), expected=True) 269 | 270 | 271 | class TestMergeTests: 272 | 273 | def test_empty_config(self): 274 | config = Config() 275 | config.merge(Config()) 276 | compare(config.data, expected={}) 277 | 278 | def test_non_empty_config(self): 279 | config = Config({'foo': 'bar'}) 280 | config.merge(Config({'baz': 'bob'})) 281 | compare(config.data, {'foo': 'bar', 'baz': 'bob'}) 282 | 283 | def test_config_node(self): 284 | config = Config({'foo': 'bar'}) 285 | config.merge(Config({'root': {'baz': 'bob'}}).root) 286 | compare(config.data, {'foo': 'bar', 'baz': 'bob'}) 287 | 288 | def test_simple_type(self): 289 | config = Config() 290 | with ShouldRaise(TypeError( 291 | "Cannot merge with " 292 | )): 293 | config.merge('foo') 294 | 295 | def test_dict_to_dict(self): 296 | config = Config({'x': 1}) 297 | config.merge({'y': 2}) 298 | compare(config.data, expected={'x': 1, 'y': 2}) 299 | 300 | def test_list_to_list(self): 301 | config = Config([1, 2]) 302 | config.merge([3, 4]) 303 | compare(config.data, expected=[1, 2, 3, 4]) 304 | 305 | def test_dict_to_list(self): 306 | config1 = Config([1, 2]) 307 | config2 = Config({'x': 1}) 308 | with ShouldRaise(TypeError( 309 | "Cannot merge with " 310 | )): 311 | config1.merge(config2) 312 | 313 | def test_list_to_dict(self): 314 | config1 = Config({'x': 1}) 315 | config2 = Config([1, 2]) 316 | with ShouldRaise(TypeError( 317 | "Cannot merge with " 318 | )): 319 | config1.merge(config2) 320 | 321 | def test_other_to_dict(self): 322 | config1 = Config(1) 323 | config2 = Config(1) 324 | with ShouldRaise(TypeError( 325 | "Cannot 
326 | )): 327 | config1.merge(config2) 328 | 329 | def test_nested_working(self): 330 | config1 = Config(dict(x=1, y=[2, 3], z=dict(a=4, b=5))) 331 | config2 = Config(dict(w=6, y=[7], z=dict(b=8, c=9))) 332 | config1.merge(config2) 333 | 334 | compare(config1.data, 335 | expected=dict(x=1, w=6, y=[2, 3, 7], z=dict(a=4, b=8, c=9))) 336 | 337 | def test_override_type_mapping(self): 338 | config1 = Config([1, 2]) 339 | config2 = Config([3, 4]) 340 | def zipper(context, source, target): 341 | return zip(target, source) 342 | config1.merge(config2, mergers={list: zipper}) 343 | compare(config1.data, expected=[(1, 3), (2, 4)]) 344 | 345 | def test_type_returns_new_object(self): 346 | config1 = Config((1, 2)) 347 | config2 = Config((3, 4)) 348 | def concat(context, source, target): 349 | return target + source 350 | config1.merge(config2, mergers={tuple: concat}) 351 | compare(config1.data, expected=(1, 2, 3, 4)) 352 | 353 | def test_blank_type_mapping(self): 354 | config1 = Config({'foo': 'bar'}) 355 | config2 = Config({'baz': 'bob'}) 356 | with ShouldRaise(TypeError( 357 | "Cannot merge <class 'dict'> with <class 'dict'>" 358 | )): 359 | config2.merge(config1, mergers={}) 360 | 361 | def test_supplement_type_mapping(self): 362 | config1 = Config({'x': (1, 2)}) 363 | config2 = Config({'x': (3, 4)}) 364 | def concat(context, source, target): 365 | return target + source 366 | config1.merge(config2, mergers=default_mergers+{tuple: concat}) 367 | compare(config1.data, expected={'x': (1, 2, 3, 4)}) 368 | 369 | def test_mapping_paths(self): 370 | config = Config({'x': 'old'}) 371 | data = {'foo': 'bar'} 372 | config.merge(data, mapping={ 373 | source['foo']: target['x'] 374 | }) 375 | compare(config.data, expected={'x': 'bar'}) 376 | 377 | def test_mapping_strings(self): 378 | config = Config({'x': 'old'}) 379 | data = {'foo': 'bar'} 380 | config.merge(data, mapping={ 381 | 'foo': 'x' 382 | }) 383 | compare(config.data, expected={'x': 'bar'}) 384 | 385 | def test_mapping_dotted_strings(self): 386 | config = Config({'a': {'b': 'old'}}) 387 | data = {'c': {'d': 'new'}} 388 | config.merge(data, mapping={ 389 | 'c.d': 'a.b' 390 | }) 391 | compare(config.data, expected={'a': {'b': 'new'}}) 392 | 393 | def test_mapping_type_conversion(self): 394 | config = Config({'x': 0}) 395 | data = {'y': '1'} 396 | config.merge(data, mapping={ 397 | convert(source['y'], int): target['x'] 398 | }) 399 | compare(config.data, expected={'x': 1}) 400 | 401 | def test_mapping_extensive_conversation(self): 402 | config = Config({'a': 0}) 403 | data = {'x': 2, 'y': -1} 404 | 405 | def best(possible): 406 | return max(possible.values()) 407 | 408 | config.merge(data, mapping={ 409 | convert(source, best): target['a'] 410 | }) 411 | 412 | compare(config.data, expected={'a': 2}) 413 | 414 | def test_mapping_with_merge(self): 415 | config = Config({'x': {'y': 1}}) 416 | data = {'z': 2} 417 | config.merge(data, mapping={ 418 | source: target['x'].merge() 419 | }) 420 | compare(config.data, expected={'x': {'y': 1, 'z': 2}}) 421 | 422 | def test_mapping_with_top_level_merge(self): 423 | config = Config({'x': {'y': 1}}) 424 | data = {'z': 2} 425 | config.merge(data, mapping={ 426 | source: target.merge() 427 | }) 428 | compare(config.data, expected={'x': {'y': 1}, 'z': 2}) 429 | 430 | def test_mapping_into_empty_dict(self): 431 | defaults = Config({ 432 | 'section1': {}, 433 | 'section2': {'nested': {}} 434 | }) 435 | config = Config() 436 | config.merge(defaults) 437 | config.merge({'value': 1}, mapping={'value': 'section1.value'}) 438 | 
config.merge({'value': 2}, mapping={'value': 'section2.nested.value'}) 439 | compare(config.data, expected={ 440 | 'section1': {'value': 1}, 441 | 'section2': {'nested': {'value': 2}} 442 | }) 443 | compare(defaults.data, expected={ 444 | 'section1': {}, 445 | 'section2': {'nested': {}} 446 | }) 447 | 448 | def test_mapping_only_source(self): 449 | config = Config() 450 | config.merge(mapping={ 451 | value(1): 'section1.value', 452 | value(2): 'section2.nested.value', 453 | }) 454 | compare(config.data, expected={ 455 | 'section1': {'value': 1}, 456 | 'section2': {'nested': {'value': 2}} 457 | }) 458 | 459 | def test_clone(self): 460 | config = Config({'dict': {'a': 1, 'b': 2}, 461 | 'list': [{'c': 3}, {'d': 4}]}) 462 | config_ = config.clone() 463 | assert config is not config_ 464 | compare(config_.data, expected=config.data) 465 | assert config.data is not config_.data 466 | assert config.data['dict'] is not config_.data['dict'] 467 | assert config.data['list'] is not config_.data['list'] 468 | assert config.data['list'][0] is not config_.data['list'][0] 469 | assert config.data['list'][1] is not config_.data['list'][1] 470 | 471 | 472 | class TestAddition: 473 | 474 | def test_top_level_dict(self): 475 | config1 = Config({'foo': 'bar'}) 476 | config2 = Config({'baz': 'bob'}) 477 | config3 = config1 + config2 478 | compare(config1.data, {'foo': 'bar'}) 479 | compare(config2.data, {'baz': 'bob'}) 480 | compare(config3.data, {'foo': 'bar', 'baz': 'bob'}) 481 | 482 | def test_top_level_list(self): 483 | config1 = Config([1, 2]) 484 | config2 = Config([3, 4]) 485 | config3 = config1 + config2 486 | compare(config1.data, [1, 2]) 487 | compare(config2.data, [3, 4]) 488 | compare(config3.data, [1, 2, 3, 4]) 489 | 490 | def test_non_config_rhs(self): 491 | config = Config({'foo': 'bar'}) + {'baz': 'bob'} 492 | compare(config.data, {'foo': 'bar', 'baz': 'bob'}) 493 | 494 | def test_failure(self): 495 | with ShouldRaise(TypeError( 496 | "Cannot merge <class 'int'> with <class 'dict'>" 497 | )): 498 | Config({'foo': 'bar'}) + 1 499 | 500 | 501 | class TestSerialization: 502 | 503 | def test_pickle_default_protocol(self): 504 | config = Config({'foo': [1, 2]}) 505 | data = pickle.dumps(config) 506 | config_ = pickle.loads(data) 507 | compare(expected=config, actual=config_) 508 | 509 | def test_pickle_highest_protocol(self): 510 | config = Config({'foo': [1, 2]}) 511 | data = pickle.dumps(config, pickle.HIGHEST_PROTOCOL) 512 | config_ = pickle.loads(data) 513 | compare(expected=config, actual=config_) 514 | -------------------------------------------------------------------------------- /docs/use.rst: -------------------------------------------------------------------------------- 1 | .. py:currentmodule:: configurator 2 | 3 | Using Configurator 4 | ================== 5 | 6 | This document goes into more detail than the quickstart and should cover enough 7 | functionality for most use cases. For examples of how to use this functionality, 8 | see :doc:`patterns`. For details of all the classes, methods and functions available, see 9 | the :doc:`api`. 10 | 11 | Getting configuration information 12 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 13 | 14 | Several ways of obtaining configuration information are available. 15 | 16 | Files, streams and text 17 | ----------------------- 18 | 19 | .. invisible-code-block: python 20 | 21 | fs.create_file('/etc/myapp.yml', 22 | contents='myapp:\n cache:\n location: /var/my_app/\n') 23 | 24 | 25 | The most common source of configuration information is reading from files. 
26 | Given a file such as this: 27 | 28 | >>> print(open('/etc/myapp.yml').read()) 29 | myapp: 30 | cache: 31 | location: /var/my_app/ 32 | 33 | 34 | A :class:`Config` object can be obtained as follows: 35 | 36 | >>> from configurator import Config 37 | >>> Config.from_path('/etc/myapp.yml') 38 | configurator.config.Config({'myapp': {'cache': {'location': '/var/my_app/'}}}) 39 | 40 | If you already have an open stream, it would be this instead: 41 | 42 | >>> with open('/etc/myapp.yml') as source: 43 | ... Config.from_stream(source) 44 | configurator.config.Config({'myapp': {'cache': {'location': '/var/my_app/'}}}) 45 | 46 | Finally, if you have the text source in memory, you would do the following: 47 | 48 | >>> text = """ 49 | ... cache: 50 | ... location: /var/my_app/ 51 | ... """ 52 | >>> Config.from_text(text, 'yaml') 53 | configurator.config.Config({'cache': {'location': '/var/my_app/'}}) 54 | 55 | When parsing strings, the parser must be specified because we have no way of guessing. 56 | The ``parser`` parameter can also be used with :meth:`Config.from_path` and 57 | :meth:`Config.from_stream` to explicitly specify a parser, regardless of the name of 58 | the file. 59 | 60 | The parser can also be specified as a callable, if you have one-off unusual parsing needs: 61 | 62 | >>> text = """ 63 | ... {'format': 'not json'} 64 | ... """ 65 | >>> import ast 66 | >>> def python(stream): 67 | ... return ast.literal_eval(stream.read()) 68 | >>> Config.from_text(text, python) 69 | configurator.config.Config({'format': 'not json'}) 70 | 71 | If you need to add support for a new config file format or wish to use a different parser 72 | for existing file formats, see :ref:`parsers`. 73 | 74 | Environment variables 75 | --------------------- 76 | 77 | Configuration can be obtained from environment variables; the best approach depends on the 78 | number and type of variables you're starting from. 79 | 80 | If it's a small number and you need to add them to arbitrary configuration locations, 81 | then :doc:`mapping <mapping>` works well: 82 | 83 | .. invisible-code-block: python 84 | 85 | replace('os.environ.OMP_NUM_THREADS', '2', strict=False) 86 | replace('os.environ.CACHE_DIRECTORY', '/var/cache/it/', strict=False) 87 | import os 88 | 89 | >>> from configurator import Config, convert, required 90 | >>> config = Config() 91 | >>> config.merge(os.environ, { 92 | ... convert('OMP_NUM_THREADS', int): 'threads', 93 | ... required('CACHE_DIRECTORY'): 'cache.location', 94 | ... }) 95 | >>> config 96 | configurator.config.Config({'cache': {'location': '/var/cache/it/'}, 'threads': 2}) 97 | 98 | If you have many environment variables with the same prefix, :meth:`Config.from_env` 99 | can be used: 100 | 101 | .. invisible-code-block: python 102 | 103 | replace('os.environ', { 104 | 'MYAPP_THREADS': '2', 105 | 'MYAPP_CACHE_DIRECTORY': '/var/logs/myapp/' 106 | # pyfakefs installs a fake os module! 107 | }, strict=False) 108 | 109 | >>> os.environ['MYAPP_THREADS'] 110 | '2' 111 | >>> os.environ['MYAPP_CACHE_DIRECTORY'] 112 | '/var/logs/myapp/' 113 | >>> Config.from_env('MYAPP_') 114 | configurator.config.Config({'cache_directory': '/var/logs/myapp/', 'threads': '2'}) 115 | 116 | If the environment variable names contain a suffix that indicates their type, then 117 | :meth:`~Config.from_env` can do the type conversion: 118 | 119 | .. 
invisible-code-block: python 120 | 121 | replace('os.environ', { 122 | 'MYAPP_SERVER_PORT': '4242', 123 | 'MYAPP_CACHE_PATH': '/tmp/myapp' 124 | }) 125 | 126 | For example, given the following environment variables: 127 | 128 | >>> os.environ.get('MYAPP_SERVER_PORT') 129 | '4242' 130 | >>> os.environ.get('MYAPP_CACHE_PATH') 131 | '/tmp/myapp' 132 | 133 | Configuration could be extracted as follows: 134 | 135 | >>> from pathlib import Path 136 | >>> Config.from_env(prefix='MYAPP_', types={'PORT': int, 'PATH': Path}) 137 | configurator.config.Config({'cache_path': PosixPath('/tmp/myapp'), 'server_port': 4242}) 138 | 139 | If different prefixes indicate different configuration locations, then ``prefix`` can be 140 | a mapping: 141 | 142 | 143 | .. invisible-code-block: python 144 | 145 | replace('os.environ', { 146 | 'MYAPP_POSTGRES_HOST': 'some-host', 147 | 'MYAPP_POSTGRES_PORT': '5432', 148 | 'MYAPP_REDIS_HOST': 'other-host', 149 | 'MYAPP_REDIS_PORT': '6379', 150 | }) 151 | 152 | >>> os.environ.get('MYAPP_POSTGRES_HOST') 153 | 'some-host' 154 | >>> os.environ.get('MYAPP_POSTGRES_PORT') 155 | '5432' 156 | >>> os.environ.get('MYAPP_REDIS_HOST') 157 | 'other-host' 158 | >>> os.environ.get('MYAPP_REDIS_PORT') 159 | '6379' 160 | >>> Config.from_env(prefix={ 161 | ... 'MYAPP_POSTGRES_': 'services.postgres', 162 | ... 'MYAPP_REDIS_': 'services.redis' 163 | ... }) 164 | configurator.config.Config( 165 | {'services': {'postgres': {'host': 'some-host', 'port': '5432'}, 166 | 'redis': {'host': 'other-host', 'port': '6379'}}} 167 | ) 168 | 169 | 170 | Other sources 171 | ------------- 172 | 173 | It is also quite normal to instantiate an empty :class:`Config` and then :doc:`merge ` 174 | configuration into it from several other sources: 175 | 176 | >>> Config() 177 | configurator.config.Config({}) 178 | 179 | If you already have a deserialized source of configuration information, you can 180 | wrap a :class:`Config` around it and use it from that point onwards: 181 | 182 | .. invisible-code-block: python 183 | 184 | import requests 185 | from testfixtures.mock import Mock 186 | requests = Mock() 187 | requests.get.return_value.json.return_value = {'cache': {'location': '/var/my_app/'}} 188 | replace('sys.modules.requests', requests, strict=False) 189 | 190 | >>> Config(requests.get('http://config-store/myapp.json').json()) 191 | configurator.config.Config({'cache': {'location': '/var/my_app/'}}) 192 | 193 | Accessing configuration information 194 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 195 | 196 | Configurator aims to provide access to configuration information in a simple and 197 | natural way, similar to the underlying python data structures but allowing both 198 | item and attribute access to be used interchangeably. 199 | 200 | So, with a config such as this: 201 | 202 | >>> config = Config({'logs': '/var/my_app/', 203 | ... 'sources': [{'url': 'https://example.com/1', 204 | ... 'username': 'user1', 205 | ... 'password': 'p1'}, 206 | ... {'url': 'https://example.com/2', 207 | ... 'username': 'user2', 208 | ... 'password': 'p2'}]}) 209 | 210 | The various parts can be accessed as follows: 211 | 212 | >>> config['logs'] 213 | '/var/my_app/' 214 | >>> for source in config['sources']: 215 | ... 
print(source['url'], source['username'], source['password']) 216 | https://example.com/1 user1 p1 217 | https://example.com/2 user2 p2 218 | 219 | Using item access allows configuration that contains both mappings and sequences to be 220 | traversed easily and reliably: 221 | 222 | >>> config['sources'][1]['url'] 223 | 'https://example.com/2' 224 | 225 | Where it's more natural, configuration can also be treated like a dictionary. 226 | For example, with this config: 227 | 228 | >>> config = Config({'databases': {'main': 'mysql://foo@bar/main', 229 | ... 'backup': 'mysql://baz@bob/backup'}}) 230 | 231 | You could iterate through the databases as follows: 232 | 233 | >>> for name, url in sorted(config['databases'].items()): 234 | ... print(name, url) 235 | backup mysql://baz@bob/backup 236 | main mysql://foo@bar/main 237 | 238 | Likewise, if a key may not be present: 239 | 240 | >>> config['databases'].get('read_only', default=config['databases'].get('backup')) 241 | 'mysql://baz@bob/backup' 242 | 243 | As a convenience, attribute access may also be used where possible. 244 | So, with a config such as this: 245 | 246 | >>> config = Config({'sources': [{'url': 'https://example.com/1', 247 | ... 'username': 'user1', 248 | ... 'password': 'p1'}, 249 | ... {'url': 'https://example.com/2', 250 | ... 'username': 'user2', 251 | ... 'password': 'p2'}]}) 252 | 253 | You could take advantage of attribute access as follows: 254 | 255 | >>> for source in config.sources: 256 | ... print(source.username, source.password) 257 | user1 p1 258 | user2 p2 259 | 260 | .. warning:: 261 | 262 | Care must be taken when using attribute access as methods and attributes provided by 263 | configurator will take precedence over any configuration information. 264 | 265 | As a fallback, every node in the config will have a :attr:`~node.ConfigNode.data` attribute 266 | that can be used to get hold of the underlying configuration information: 267 | 268 | >>> type(config.sources) 269 | <class 'configurator.node.ConfigNode'> 270 | >>> type(config.sources.data) 271 | <class 'list'> 272 | >>> len(config.sources.data) 273 | 2 274 | 275 | .. warning:: 276 | :attr:`~node.ConfigNode.data` should not be modified as problems will occur 277 | if the :class:`~node.ConfigNode` hierarchy and :attr:`~node.ConfigNode.data` 278 | hierarchy become out of sync. 279 | 280 | If you want to have a :class:`~node.ConfigNode` even in the case of scalar values, then the 281 | :meth:`~node.ConfigNode.node` method can be used: 282 | 283 | >>> config = Config({'x': 1}) 284 | >>> config.node('x') 285 | configurator.node.ConfigNode(1) 286 | 287 | Combining sources of configuration 288 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 289 | 290 | It's rare that configuration for an application will come from a single source and 291 | so configurator makes it easy to combine them. 292 | 293 | Simple overlaying 294 | ----------------- 295 | 296 | The simplest way is by adding two :class:`Config` instances. 
This will recursively 297 | merge the underlying configuration data, unioning dictionary items and concatenating 298 | sequences: 299 | 300 | >>> config1 = Config({'mapping': {'a': 1, 'b': 2}, 'sequence': ['a']}) 301 | >>> config2 = Config({'mapping': {'b': 3, 'c': 4}, 'sequence': ['b']}) 302 | >>> config1 + config2 303 | configurator.config.Config({'mapping': {'a': 1, 'b': 3, 'c': 4}, 'sequence': ['a', 'b']}) 304 | 305 | Merging 306 | ------- 307 | 308 | If you need to have more control over this process, :meth:`Config.merge` allows 309 | you to specify how merging will be performed per python object type: 310 | 311 | >>> config1 = Config([1, 2, 3, 4, 5]) 312 | >>> config2 = Config([6, 7, 8, 9, 10]) 313 | 314 | In this case, we want to interleave the two lists when they are merged, which can be done 315 | with a function like this: 316 | 317 | .. code-block:: python 318 | 319 | from itertools import chain, zip_longest 320 | 321 | def alternate(context, source, target): 322 | return [i for i in chain.from_iterable(zip_longest(target, source)) if i] 323 | 324 | We can use this with the :any:`default_mergers` to ensure that all lists that are merged 325 | are interleaved: 326 | 327 | >>> from configurator import default_mergers 328 | >>> config1.merge(config2, mergers=default_mergers+{list: alternate}) 329 | >>> config1 330 | configurator.config.Config([1, 6, 2, 7, 3, 8, 4, 9, 5, 10]) 331 | 332 | .. note:: 333 | :meth:`~Config.merge` mutates the :class:`Config` on which it is called, 334 | while adding two :class:`Config` objects together leaves both of the source configs unmodified 335 | and returns a new :class:`Config`. 336 | 337 | .. invisible-code-block: python 338 | 339 | from testfixtures.mock import Mock 340 | import os 341 | replace('os.environ.BAZ', 'True', strict=False) 342 | 343 | For more detailed documentation, see :doc:`mapping`. 344 | 345 | Mapping 346 | ------- 347 | 348 | If you need more flexibility in how parts of the configuration source are mapped in, 349 | or if the source data structure is not compatible with merging, you can use a mapping: 350 | 351 | >>> source = Mock() 352 | >>> source.foo.bar = 'some_value' 353 | >>> config = Config({'bar': {'type': 'foo'}}) 354 | >>> config.merge(source, {'foo.bar': 'bar.name'}) 355 | >>> config 356 | configurator.config.Config({'bar': {'name': 'some_value', 'type': 'foo'}}) 357 | 358 | Mapping can also be used to convert data from a configuration source: 359 | 360 | >>> from configurator.mapping import convert 361 | >>> from ast import literal_eval 362 | >>> os.environ.get('BAZ') 363 | 'True' 364 | >>> config.merge(os.environ, {convert('BAZ', literal_eval): 'baz'}) 365 | >>> config 366 | configurator.config.Config({'bar': {'name': 'some_value', 'type': 'foo'}, 'baz': True}) 367 | 368 | There is a lot of flexibility in how mapping and merging can be performed. For 369 | detailed documentation on this see :doc:`mapping`. 370 | 371 | .. invisible-code-block: python 372 | 373 | fs.create_file('/etc/my_app/config.yaml', contents=""" 374 | actions: 375 | - checkout: 376 | repo: git@github.com:Simplistix/configurator.git 377 | branch: master 378 | - run: "cat /foo/bar" 379 | """) 380 | 381 | Modifying configuration 382 | ~~~~~~~~~~~~~~~~~~~~~~~ 383 | 384 | Once you have a :class:`Config` object, you may still need to modify the configuration 385 | information it contains. 
386 | 387 | Adding and deleting 388 | ------------------- 389 | 390 | Items can be added to a config using the mapping interface: 391 | 392 | >>> config = Config() 393 | >>> config['meaning'] = 42 394 | >>> config 395 | configurator.config.Config({'meaning': 42}) 396 | 397 | If the name is compatible with Python syntax, then you can also use attribute assignment: 398 | 399 | >>> config.meaning = 'new' 400 | >>> config 401 | configurator.config.Config({'meaning': 'new'}) 402 | 403 | If you need to remove an item, then you can do this using the mapping interface: 404 | 405 | >>> del config['meaning'] 406 | >>> config 407 | configurator.config.Config({}) 408 | 409 | If the name is compatible with Python syntax, then you can also use the attribute interface: 410 | 411 | >>> config = Config({'meaning': 'life'}) 412 | >>> del config.meaning 413 | >>> config 414 | configurator.config.Config({}) 415 | 416 | If the configuration is a list, then modifying items can be done using the sequence interface: 417 | 418 | >>> config = Config(['item1', 'item2', 'item3']) 419 | >>> config[0] = 'new' 420 | >>> config 421 | configurator.config.Config(['new', 'item2', 'item3']) 422 | 423 | This can also be used to remove items: 424 | 425 | >>> del config[1] 426 | >>> config 427 | configurator.config.Config(['new', 'item3']) 428 | 429 | If you need to set an item deep within a nested structure that may or may not exist, then 430 | :meth:`~node.ConfigNode.node` can be used: 431 | 432 | >>> config = Config({'foo': {}}) 433 | >>> config.node('foo.bar.baz', create=True).set(42) 434 | >>> config 435 | configurator.config.Config({'foo': {'bar': {'baz': 42}}}) 436 | 437 | If the location traverses through lists, then a :class:`~configurator.path.Path` starting 438 | from :any:`source` can be used: 439 | 440 | >>> from configurator import source 441 | >>> config = Config([{'name': 'db1', 'password': 'compromised'}]) 442 | >>> config.node(source[0]['password']).set('secure') 443 | >>> config 444 | configurator.config.Config([{'name': 'db1', 'password': 'secure'}]) 445 | 446 | Pushing and popping 447 | ------------------- 448 | 449 | Some frameworks and patterns make use of a global configuration object which needs to be referenced 450 | before the configuration is obtained from its sources. For this reason, Configurator provides the 451 | facility to push configuration onto an existing :class:`Config` and later pop it off. 452 | 453 | For example, given this global config: 454 | 455 | >>> config = Config({'option1': 'default', 'option3': 'foo'}) 456 | 457 | Additional configuration can be pushed onto it once available: 458 | 459 | >>> config.push(Config({'option1': 'non-default', 'option2': 42})) 460 | 461 | >>> config 462 | configurator.config.Config({'option1': 'non-default', 'option2': 42, 'option3': 'foo'}) 463 | 464 | If that configuration is no longer relevant, it can be popped off: 465 | 466 | >>> config.pop() 467 | >>> config 468 | configurator.config.Config({'option1': 'default', 'option3': 'foo'}) 469 | 470 | This process can also be used for managing a context: 471 | 472 | >>> with config.push(Config({'option1': 'non-default'})): 473 | ... print(config['option1']) 474 | non-default 475 | 476 | If you wish to push an entirely new configuration, this can be done as follows: 477 | 478 | >>> config = Config({'option1': 'default', 'option3': 'foo'}) 479 | >>> with config.push(Config({'option1': 'non-default', 'option2': 42}), empty=True): 480 | ... 
print(config) 481 | configurator.config.Config({'option1': 'non-default', 'option2': 42}) 482 | 483 | You can also use this method to preserve configuration and restore it to its previous state 484 | as follows: 485 | 486 | >>> config = Config({'option1': 'default', 'option3': 'foo'}) 487 | >>> with config.push(): 488 | ... config['option1'] = 'bad' 489 | ... del config['option3'] 490 | >>> config 491 | configurator.config.Config({'option1': 'default', 'option3': 'foo'}) 492 | 493 | Cloning 494 | ------- 495 | 496 | If you need a complete and separate copy of a :class:`Config` for any reason, one can be 497 | obtained using the :meth:`~Config.clone` method: 498 | 499 | >>> original = Config({'x': {'y': 'z'}}) 500 | >>> scratch = original.clone() 501 | >>> scratch['a'] = 'b' 502 | >>> scratch.node('x.y').set('z-') 503 | >>> scratch 504 | configurator.config.Config({'a': 'b', 'x': {'y': 'z-'}}) 505 | >>> original 506 | configurator.config.Config({'x': {'y': 'z'}}) 507 | 508 | Transforming 509 | ------------ 510 | 511 | One other form of manipulation that's worth mentioning is when incoming data isn't 512 | quite the right shape. Take this YAML: 513 | 514 | >>> print(open('/etc/my_app/config.yaml').read()) 515 | 516 | actions: 517 | - checkout: 518 | repo: git@github.com:Simplistix/configurator.git 519 | branch: master 520 | - run: "cat /foo/bar" 521 | 522 | 523 | The actions, while easy to read, aren't homogeneous or easy for the application to use. 524 | It might be easier if they were something like: 525 | 526 | .. code-block:: python 527 | 528 | {'actions': [{'type': 'checkout', 'kw': {'repo': '...', 'branch': 'master'}}, 529 | {'type': 'run', 'args': ('cat /foo/bar',)}]} 530 | 531 | We can achieve this by modifying the data in the :class:`Config` programmatically 532 | with a function such as this: 533 | 534 | .. code-block:: python 535 | 536 | def normalise(actions): 537 | for action in actions: 538 | (type_, params), = action.data.items() 539 | if isinstance(params, dict): 540 | data = {'type': type_, 'args': (), 'kw': params} 541 | else: 542 | data = {'type': type_, 'args': (params,), 'kw': {}} 543 | action.set(data) 544 | 545 | This can be applied to the raw config as follows: 546 | 547 | >>> config = Config.from_path('/etc/my_app/config.yaml') 548 | >>> normalise(config.actions) 549 | 550 | .. invisible-code-block: python 551 | 552 | from testfixtures.mock import MagicMock 553 | action_handlers = MagicMock() 554 | 555 | Now, the application code can use the config in a uniform way: 556 | 557 | >>> for action in config.actions: 558 | ... output = action_handlers[action.type](*action.args, **action.kw.data) 559 | 560 | .. _parsers: 561 | 562 | Adding new parsers 563 | ~~~~~~~~~~~~~~~~~~ 564 | 565 | .. py:currentmodule:: configurator 566 | 567 | When creating :class:`Config` instances using :meth:`~Config.from_text`, 568 | :meth:`~Config.from_stream` or :meth:`~Config.from_path` you may have to specify a parser. 569 | This can be either a string or a callable. 570 | 571 | When it's a callable, which should be rare, the callable should take a single argument 572 | that will be the stream from which text can be read. A nested python data structure 573 | containing the parsed results of the stream should be returned, made up of only simple python 574 | data types as would be returned by :func:`ast.literal_eval`. 
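For example, a minimal sketch of such a callable for ``.ini`` files, using only the standard library ``configparser`` module (the ``ini_parser`` name here is purely illustrative), might look like this:

.. code-block:: python

    from configparser import ConfigParser

    def ini_parser(stream):
        # The callable is handed a stream from which the source text can be read...
        parser = ConfigParser()
        parser.read_string(stream.read())
        # ...and should return plain python data structures.
        return {name: dict(section)
                for name, section in parser.items()
                if name != parser.default_section}

It could then be passed anywhere a parser is accepted, for example as the second argument to :meth:`~Config.from_text`.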
575 | 576 | More commonly, it will either be deduced from the extension of the file being processed or 577 | provided explicitly as a textual file extension such as ``'yaml'``, ``'toml'`` or ``'json'``. 578 | Where these require third party libraries, you may need to install extras for them to be 579 | available: 580 | 581 | .. code-block:: bash 582 | 583 | pip install configurator[yaml,toml] 584 | --------------------------------------------------------------------------------