├── .github └── workflows │ ├── docs.yml │ ├── publish.yml │ └── run_tests.yml ├── .gitignore ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── docs ├── CONTRIBUTING.md ├── changelog.md ├── index.md └── reference.md ├── mkdocs.yml ├── pyproject.toml ├── src └── config │ ├── .gitignore │ ├── __init__.py │ ├── configuration.py │ ├── configuration_set.py │ ├── contrib │ ├── __init__.py │ ├── aws.py │ ├── azure.py │ ├── gcp.py │ └── vault.py │ ├── helpers.py │ └── py.typed └── tests ├── __init__.py ├── contrib ├── __init__.py ├── test_aws.py ├── test_azure.py ├── test_gcp.py └── test_vault.py ├── python_config.py ├── python_config_2.py ├── test_basic.py ├── test_configuration_set.py ├── test_datatypes.py ├── test_dict_like.py ├── test_dotenv.py ├── test_env.py ├── test_ini.py ├── test_interpolation.py ├── test_issues.py ├── test_json.py ├── test_nested.py ├── test_path.py ├── test_python.py ├── test_toml.py ├── test_validation.py └── test_yaml.py /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: Docs 2 | 3 | on: 4 | release: 5 | types: 6 | - published 7 | 8 | permissions: 9 | contents: write 10 | 11 | env: 12 | PYTHON_VERSION: 3.11 13 | 14 | jobs: 15 | deploy: 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - name: Checkout 20 | uses: actions/checkout@v4 21 | 22 | - name: Configure Git Credentials 23 | run: | 24 | git config user.name github-actions[bot] 25 | git config user.email github-actions[bot]@users.noreply.github.com 26 | 27 | - name: Set up Python 28 | uses: actions/setup-python@v5 29 | with: 30 | python-version: ${{ env.PYTHON_VERSION }} 31 | 32 | - name: Install hatch 33 | run: | 34 | python -m pip install hatch 35 | 36 | - name: Build and Publish 37 | run: hatch run docs:mkdocs gh-deploy --force 38 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish to PyPI 2 | 3 | on: 4 | release: 5 | types: 6 | - published 7 | 8 | permissions: 9 | contents: read 10 | 11 | env: 12 | PYTHON_VERSION: 3.11 13 | 14 | jobs: 15 | deploy: 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - name: Checkout 20 | uses: actions/checkout@v4 21 | 22 | - name: Set up Python 23 | uses: actions/setup-python@v5 24 | with: 25 | python-version: ${{ env.PYTHON_VERSION }} 26 | 27 | - name: Install hatch 28 | run: | 29 | python -m pip install hatch 30 | 31 | - name: Build package 32 | run: hatch build 33 | 34 | - name: Publish 35 | run: hatch publish -y 36 | env: 37 | HATCH_INDEX_AUTH: ${{ secrets.HATCH_INDEX_AUTH }} 38 | -------------------------------------------------------------------------------- /.github/workflows/run_tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | 9 | concurrency: 10 | group: ${{ github.workflow }}-${{ github.ref }} 11 | cancel-in-progress: true 12 | 13 | jobs: 14 | testing: 15 | runs-on: ubuntu-latest 16 | name: Testing 17 | strategy: 18 | matrix: 19 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] 20 | 21 | steps: 22 | - name: Checkout 23 | uses: actions/checkout@v4 24 | 25 | - name: Set up Python ${{ matrix.python-version }} 26 | uses: actions/setup-python@v5 27 | with: 28 | python-version: ${{ matrix.python-version }} 29 | 30 | - name: Install hatch 31 | run: | 32 | python -m pip install hatch 33 | 34 | - name: 
Run pytest 35 | run: | 36 | hatch build --hooks-only 37 | hatch run +py=${{ matrix.python-version }} testing:test --cov=./ --cov-report=xml 38 | 39 | - name: Coverage report 40 | uses: codecov/codecov-action@v1 41 | 42 | docs: 43 | runs-on: ubuntu-latest 44 | name: Build Docs 45 | 46 | steps: 47 | - name: Checkout 48 | uses: actions/checkout@v4 49 | 50 | - name: Set up Python 51 | uses: actions/setup-python@v5 52 | with: 53 | python-version: 3.x 54 | 55 | - name: Install hatch 56 | run: | 57 | python -m pip install hatch 58 | 59 | - name: Build 60 | run: hatch run docs:mkdocs build 61 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.egg 2 | *.egg-info 3 | *.pyc 4 | *$py.class 5 | *~ 6 | .coverage 7 | coverage.xml 8 | build/ 9 | dist/ 10 | site/ 11 | .tox/ 12 | env*/ 13 | tmp/ 14 | .venv* 15 | .cache 16 | .eggs 17 | cover 18 | .pytest* 19 | .mypy_cache 20 | .idea 21 | .vscode 22 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | ## [Unreleased] 6 | 7 | 8 | ## [0.12.1] - 2024-07-23 9 | 10 | ### Changed 11 | 12 | - JsonSchema validation now has an extra parameter for nested schemas. 13 | 14 | 15 | ## [0.12.0] - 2024-07-23 16 | 17 | ### Added 18 | 19 | - Granular `strip_prefix` parameters across different config types 20 | 21 | ### Fixed 22 | 23 | - Unit tests for .toml files 24 | 25 | ### Changed 26 | 27 | - Environment files are now loaded from filenames with a suffix of `.env` or starting with `.env` 28 | 29 | 30 | ## [0.11.0] - 2024-04-23 31 | 32 | ### Changed 33 | 34 | - Allow passing a `pathlib.Path` parameter to methods that read configuration files 35 | - Support comments in .env files 36 | 37 | 38 | ## [0.10.0] - 2024-02-19 39 | 40 | ### Changed 41 | 42 | - Use the standard lib for `toml` in Python >= 3.11 43 | - Switched to `hatch` instead of `poetry` 44 | 45 | 46 | ## [0.9.1] - 2023-08-06 47 | 48 | ### Added 49 | 50 | - Allow to pass a `ignore_missing_paths` parameter to each config method 51 | - Support for Hashicorp Vault credentials (in `config.contrib`) 52 | - Added a `validate` method to validate `Configuration` instances against a [json schema](https://json-schema.org/understanding-json-schema/basics.html#basics). 
53 | 54 | 55 | ## [0.9.0] - 2023-08-04 56 | 57 | ### Added 58 | 59 | - Added the `section_prefix` parameter that filters sections by prefix in INI/TOML files 60 | - Allow the `ignore_missing_paths` parameter to be specified individually on Configuration Sets 61 | 62 | ### Fixed 63 | 64 | - Errors when passing objects implementing `Mapping` instead of `dict` 65 | - Comparison to objects that are not a `Mapping` 66 | 67 | ### Changed 68 | 69 | - Replaced TravisCI with GitHub Actions 70 | 71 | 72 | ## [0.8.3] - 2021-10-11 73 | 74 | ### Fixed 75 | 76 | - Configurations from INI files won't be converted to lower case if `lowercase_keys = False` 77 | 78 | 79 | ## [0.8.2] - 2021-01-30 80 | 81 | ### Fixed 82 | 83 | - The behavior of merging sets was incorrect since version 0.8.0 84 | 85 | 86 | ## [0.8.0] - 2020-08-01 87 | 88 | ### Changed 89 | 90 | - The dict-like methods `keys`, `items`, and `values` now return only the first-level configuration keys instead of the old behavior of returning all the nested keys. To achieve the same behavior as before, use the `dotted_iter` context manager: 91 | 92 | ```python 93 | cfg.keys() # returns only the top level keys 94 | 95 | with cfg.dotted_iter(): 96 | cfg.keys() # returns all the keys at all depths using '.' as a separator 97 | ``` 98 | 99 | ### Fixed 100 | 101 | - Configuration objects are now immutable 102 | 103 | ### Added 104 | 105 | - Attribute dictionaries 106 | - Support for _.env_-type files 107 | - Option for deep interpolation. To activate that mode, use one of the enum values in `InterpolateEnumType` as the `interpolate_type` parameter. This allows for hierarchical _templates_, in which configuration objects use the values from lower ones to interpolate instead of simply overriding. 108 | 109 | 110 | ## [0.7.1] - 2020-07-05 111 | 112 | ### Fixed 113 | 114 | - Installation with `poetry` because of changes to pytest-black 115 | 116 | 117 | ## [0.7.0] - 2020-05-06 118 | 119 | ### Added 120 | 121 | - New string interpolation feature 122 | 123 | 124 | ## [0.6.1] - 2020-04-24 125 | 126 | ### Changed 127 | 128 | - Added a `separator` argument to the `config` function 129 | 130 | 131 | ## [0.6.0] - 2020-01-22 132 | 133 | ### Added 134 | 135 | - Added missing `dict` methods so a `Configuration` instance acts like a dictionary for most use cases 136 | - Added a `reload` method to refresh a `Configuration` instance (can be used to reload a configuration from a file that may have changed). 137 | - Added a `configs` method to expose the underlying instances of a `ConfigurationSet` 138 | 139 | 140 | ## [0.5.0] - 2020-01-08 141 | 142 | ### Added 143 | 144 | - Support for Azure Key Vault credentials (in `config.contrib`) 145 | - Support for AWS Secrets Manager credentials (in `config.contrib`) 146 | - Tox support 147 | 148 | ### Changed 149 | 150 | - Changed the `__repr__` and `__str__` methods so possibly sensitive values are not printed by default. 151 | 152 | 153 | ## [0.4.0] - 2019-10-11 154 | 155 | ### Added 156 | 157 | - Allow path-based failures using the `config` function. 158 | - Added a levels option to the dict-like objects. 159 | 160 | 161 | ## [0.3.1] - 2019-08-20 162 | 163 | ### Added 164 | 165 | - Project now builds fine on ReadTheDocs 166 | - TravisCI support 167 | - Codecov 168 | 169 | 170 | ## [0.3.0] - 2019-08-16 171 | 172 | ### Changed 173 | 174 | - Changed the old behavior in which every key was converted to lower case.
175 | 176 | 177 | ## [0.2.0] - 2019-07-16 178 | 179 | ### Added 180 | 181 | - Added Sphinx documentation 182 | - Added a `remove_levels` parameter to the `config` function 183 | 184 | 185 | ## [0.1.0] - 2019-01-16 186 | 187 | ### Added 188 | 189 | - Initial version 190 | 191 | [unreleased]: https://github.com/tr11/python-configuration/compare/0.12.1...HEAD 192 | [0.12.1]: https://github.com/tr11/python-configuration/compare/0.12.0...0.12.1 193 | [0.12.0]: https://github.com/tr11/python-configuration/compare/0.11.0...0.12.0 194 | [0.11.0]: https://github.com/tr11/python-configuration/compare/0.10.0...0.11.0 195 | [0.10.0]: https://github.com/tr11/python-configuration/compare/0.9.1...0.10.0 196 | [0.9.1]: https://github.com/tr11/python-configuration/compare/0.9.0...0.9.1 197 | [0.9.0]: https://github.com/tr11/python-configuration/compare/0.8.3...0.9.0 198 | [0.8.3]: https://github.com/tr11/python-configuration/compare/0.8.2...0.8.3 199 | [0.8.2]: https://github.com/tr11/python-configuration/compare/0.8.0...0.8.2 200 | [0.8.0]: https://github.com/tr11/python-configuration/compare/0.7.1...0.8.0 201 | [0.7.1]: https://github.com/tr11/python-configuration/compare/0.7.0...0.7.1 202 | [0.7.0]: https://github.com/tr11/python-configuration/compare/0.6.1...0.7.0 203 | [0.6.1]: https://github.com/tr11/python-configuration/compare/0.6.0...0.6.1 204 | [0.6.0]: https://github.com/tr11/python-configuration/compare/0.5.0...0.6.0 205 | [0.5.0]: https://github.com/tr11/python-configuration/compare/0.4.0...0.5.0 206 | [0.4.0]: https://github.com/tr11/python-configuration/compare/0.3.1...0.4.0 207 | [0.3.1]: https://github.com/tr11/python-configuration/compare/0.3.0...0.3.1 208 | [0.3.0]: https://github.com/tr11/python-configuration/compare/0.2.0...0.3.0 209 | [0.2.0]: https://github.com/tr11/python-configuration/compare/0.1.0...0.2.0 210 | [0.1.0]: https://github.com/tr11/python-configuration/releases/tag/0.1.0 211 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to python-configuration 2 | 3 | First off, thanks for taking the time to contribute! 4 | 5 | ## Setting up a dev environment 6 | 7 | 1. Fork the [`tr11/python-configuration`](https://github.com/tr11/python-configuration) GitHub repo. 8 | 1. Clone the fork: 9 | 10 | ```shell 11 | git clone https://github.com/<your-username>/python-configuration.git 12 | cd python-configuration 13 | ``` 14 | 15 | 1. Use [`hatch`](https://hatch.pypa.io/) to generate a version file and install the dependencies: 16 | 17 | ```shell 18 | hatch build --hooks-only # generate a version file from the git commit 19 | # or 20 | hatch build 21 | ``` 22 | 23 | ### Running the tests 24 | 25 | To run the tests (which include linting and type checks), run: 26 | ```shell 27 | hatch run test:test 28 | ``` 29 | 30 | Before opening a PR, make sure to run 31 | ```shell 32 | hatch run testing:test 33 | ``` 34 | which executes the previous test command on all Python versions supported by the library.
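It is also possible to target a single interpreter from the `testing` matrix, or to run the lint, typing, and docstring checks on their own. The commands below are a sketch based on the `hatch` environments defined in `pyproject.toml` and on the invocation used by the CI workflow; the Python version shown is only an example.

```shell
# run the test suite against a single matrix version (same form the CI workflow uses)
hatch run +py=3.11 testing:test

# run the lint environment scripts individually
hatch run lint:lint    # ruff check src
hatch run lint:typing  # mypy src
hatch run lint:docs    # pydocstyle src
```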
35 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Tiago Requeijo 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # python-configuration 2 | > A library to load configuration parameters hierarchically from multiple sources and formats 3 | 4 | [![Hatch project](https://img.shields.io/badge/%F0%9F%A5%9A-Hatch-4051b5.svg)](https://github.com/pypa/hatch) 5 | [![version](https://img.shields.io/pypi/v/python-configuration)](https://pypi.org/project/python-configuration/) 6 | ![python](https://img.shields.io/pypi/pyversions/python-configuration) 7 | ![wheel](https://img.shields.io/pypi/wheel/python-configuration) 8 | ![license](https://img.shields.io/pypi/l/python-configuration) 9 | [![tests](https://github.com/tr11/python-configuration/actions/workflows/run_tests.yml/badge.svg)](https://github.com/tr11/python-configuration/actions/workflows/run_tests.yml) 10 | [![codecov](https://codecov.io/gh/tr11/python-configuration/branch/main/graph/badge.svg?token=5zRYlGnDs7)](https://codecov.io/gh/tr11/python-configuration) 11 | [![Documentation](https://github.com/tr11/python-configuration/actions/workflows/docs.yml/badge.svg)](https://github.com/tr11/python-configuration/actions/workflows/docs.yml) 12 | 13 | This library is intended as a helper mechanism to load configuration files hierarchically. 
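For a quick first look (the prefix and file name below are placeholders, and every helper used here is covered in the sections that follow), a hierarchical setup that prefers environment variables and falls back to a JSON file can be as short as:

```python
from config import config

# Environment variables starting with the MYAPP prefix take precedence;
# anything missing falls back to the values in settings.json.
cfg = config("env", "settings.json", prefix="MYAPP")

cfg["database.host"]  # dotted access works across all sources
```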
14 | 15 | ## Supported Formats 16 | 17 | The `python-configuration` library supports the following configuration formats and sources: 18 | 19 | - Python files 20 | - Dictionaries 21 | - Environment variables 22 | - Filesystem paths 23 | - JSON files 24 | - INI files 25 | - dotenv type files 26 | - Optional support for: 27 | - YAML files: requires `yaml` 28 | - TOML files: requires `tomli` for Python < 3.11 29 | - Azure Key Vault credentials: requires `azure-keyvault` 30 | - AWS Secrets Manager credentials: requires `boto3` 31 | - GCP Secret Manager credentials: requires `google-cloud-secret-manager` 32 | - Hashicorp Vault credentials: requires `hvac` 33 | 34 | 35 | ## Installing 36 | 37 | To install the library: 38 | 39 | ```shell 40 | pip install python-configuration 41 | ``` 42 | 43 | To include the optional TOML and/or YAML loaders, install the optional dependencies `toml` and `yaml`. For example, 44 | 45 | ```shell 46 | pip install python-configuration[toml,yaml] 47 | ``` 48 | 49 | Without the optional dependencies, the TOML (Python < 3.11) and YAML loaders will not be available, 50 | and attempting to use them will raise an exception. 51 | 52 | ## Getting started 53 | 54 | `python-configuration` converts the various config types into dictionaries with dot-separated keys. For example, given this JSON configuration 55 | 56 | ```json 57 | { 58 | "a": { 59 | "b": "value" 60 | } 61 | } 62 | ``` 63 | 64 | We can use the `config_from_json` method to parse it: 65 | 66 | ```python 67 | from config import config_from_json 68 | 69 | cfg = config_from_json("my_config_file.json", read_from_file=True) 70 | ``` 71 | 72 | Similar methods exist for all the other supported configuration formats (e.g. `config_from_toml`). 73 | 74 | We can then refer to the parameters in the config above using any of: 75 | 76 | ```python 77 | cfg['a.b'] 78 | cfg['a']['b'] 79 | cfg['a'].b 80 | cfg.a.b 81 | ``` 82 | 83 | and extract specific data types such as dictionaries: 84 | 85 | ```python 86 | cfg['a'].as_dict() == {'b': 'value'} 87 | ``` 88 | 89 | This is particularly useful for isolating groups of parameters. 90 | For example, with the JSON configuration 91 | 92 | ```json 93 | { 94 | "database.host": "something", 95 | "database.port": 12345, 96 | "database.driver": "name", 97 | "app.debug": true, 98 | "app.environment": "development", 99 | "app.secrets": "super secret", 100 | "logging": { 101 | "service": "service", 102 | "token": "token", 103 | "tags": "tags" 104 | } 105 | } 106 | ``` 107 | 108 | one can retrieve the dictionaries as 109 | 110 | ```python 111 | cfg.database.as_dict() 112 | cfg.app.as_dict() 113 | cfg.logging.as_dict() 114 | ``` 115 | 116 | or simply as 117 | 118 | ```python 119 | dict(cfg.database) 120 | dict(cfg.app) 121 | dict(cfg.logging) 122 | ``` 123 | 124 | ## Configuration 125 | 126 | There are two general types of objects in this library. The first is the `Configuration`, which represents a single config source. The second is a `ConfigurationSet`, which allows multiple `Configuration` objects to be combined. 127 | 128 | ### Single Config 129 | 130 | #### Python Files 131 | 132 | To load a configuration from a Python module, the `config_from_python` function can be used. 133 | The first parameter must be a Python module and can be specified as an absolute path to the Python file or as an importable module. 134 | 135 | Optional parameters are the `prefix` and `separator`.
The following call 136 | 137 | ```python 138 | config_from_python('foo.bar', prefix='CONFIG', separator='__') 139 | ``` 140 | 141 | will read every variable in the `foo.bar` module that starts with `CONFIG__` and replace every occurrence of `__` with a `.`. For example, 142 | 143 | ```python 144 | # foo.bar 145 | CONFIG__AA__BB_C = 1 146 | CONFIG__AA__BB__D = 2 147 | CONF__AA__BB__D = 3 148 | ``` 149 | 150 | would result in the configuration 151 | 152 | ```python 153 | { 154 | 'aa.bb_c': 1, 155 | 'aa.bb.d': 2, 156 | } 157 | ``` 158 | 159 | Note that the single underscore in `BB_C` is not replaced and the last line is not prefixed by `CONFIG`. 160 | 161 | #### Dictionaries 162 | 163 | Dictionaries are loaded with `config_from_dict` and are converted internally to a flattened `dict`. 164 | 165 | ```python 166 | { 167 | 'a': { 168 | 'b': 'value' 169 | } 170 | } 171 | ``` 172 | 173 | becomes 174 | 175 | ```python 176 | { 177 | 'a.b': 'value' 178 | } 179 | ``` 180 | 181 | #### Environment Variables 182 | 183 | Environment variables starting with `prefix` can be read with `config_from_env`: 184 | 185 | ```python 186 | config_from_env(prefix, separator='_') 187 | ``` 188 | 189 | #### Filesystem Paths 190 | 191 | Folders with files named as `xxx.yyy.zzz` can be loaded with the `config_from_path` function. This format is useful to load mounted Kubernetes [ConfigMaps](https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#populate-a-volume-with-data-stored-in-a-configmap) or [Secrets](https://kubernetes.io/docs/tasks/inject-data-application/distribute-credentials-secure/#create-a-pod-that-has-access-to-the-secret-data-through-a-volume). 192 | 193 | #### JSON, INI, .env, YAML, TOML 194 | 195 | JSON, INI, YAML, TOML files are loaded respectively with 196 | `config_from_json`, 197 | `config_from_ini`, 198 | `config_from_dotenv`, 199 | `config_from_yaml`, and 200 | `config_from_toml`. 201 | The parameter `read_from_file` controls whether a string should be interpreted as a filename. 202 | 203 | ###### Caveats 204 | 205 | In order for `Configuration` objects to act as `dict` and allow the syntax `dict(cfg)`, the `keys()` method is implemented as the typical `dict` keys. If `keys` is an element in the configuration `cfg` then the `dict(cfg)` call will fail. In that case, it's necessary to use the `cfg.as_dict()` method to retrieve the `dict` representation for the `Configuration` object. 206 | 207 | The same applies to the methods `values()` and `items()`. 208 | 209 | 210 | ### Configuration Sets 211 | 212 | Configuration sets are used to hierarchically load configurations and merge settings. Sets can be loaded by constructing a `ConfigurationSet` object directly or using the simplified `config` function. 213 | 214 | To construct a `ConfigurationSet`, pass in as many of the simple `Configuration` objects as needed: 215 | 216 | ```python 217 | cfg = ConfigurationSet( 218 | config_from_env(prefix=PREFIX), 219 | config_from_json(path, read_from_file=True), 220 | config_from_dict(DICT), 221 | ) 222 | ``` 223 | The example above will read first from Environment variables prefixed with `PREFIX`, and fallback first to the JSON file at `path`, and finally use the dictionary `DICT`. 224 | 225 | The `config` function simplifies loading sets by assuming some defaults. 
226 | The example above can also be obtained by 227 | 228 | ```python 229 | cfg = config( 230 | ('env', PREFIX), 231 | ('json', path, True), 232 | ('dict', DICT), 233 | ) 234 | ``` 235 | 236 | or, even simpler if `path` points to a file with a `.json` suffix: 237 | 238 | ```python 239 | cfg = config('env', path, DICT, prefix=PREFIX) 240 | ``` 241 | 242 | The `config` function automatically detects the following: 243 | 244 | * extension `.py` for python modules 245 | * dot-separated python identifiers as a python module (e.g. `foo.bar`) 246 | * extension `.json` for JSON files 247 | * extension `.yaml` for YAML files 248 | * extension `.toml` for TOML files 249 | * extension `.ini` for INI files 250 | * extension `.env` for dotenv type files 251 | * filesystem folders as Filesystem Paths 252 | * the strings `env` or `environment` for Environment Variables 253 | 254 | #### Merging Values 255 | 256 | `ConfigurationSet` instances are constructed by inspecting each configuration source, taking into account nested dictionaries, and merging at the most granular level. 257 | For example, the instance obtained from `cfg = config(d1, d2)` for the dictionaries below 258 | 259 | ```python 260 | d1 = {'sub': {'a': 1, 'b': 4}} 261 | d2 = {'sub': {'b': 2, 'c': 3}} 262 | ``` 263 | 264 | is such that `cfg['sub']` equals 265 | 266 | ```python 267 | {'a': 1, 'b': 4, 'c': 3} 268 | ``` 269 | 270 | Note that the nested dictionaries of `'sub'` in each of `d1` and `d2` do not overwrite each other, but are merged into a single dictionary with keys from both `d1` and `d2`, giving priority to the values of `d1` over those from `d2`. 271 | 272 | 273 | ###### Caveats 274 | 275 | As long as the data types are consistent across all the configurations that are part of a `ConfigurationSet`, the behavior should be straightforward. When different configuration objects specify competing data types for the same key, the first configuration to define that key sets its data type. For example, if a key is interpreted as a `dict` by an environment-variables configuration, but a later JSON file specifies it as anything other than a mapping, then the JSON value is dropped automatically. 276 | 277 | ## Other Features 278 | 279 | ###### String Interpolation 280 | 281 | When setting the `interpolate` parameter in any `Configuration` instance, the library will perform a string interpolation step using the [str.format](https://docs.python.org/3/library/string.html#formatstrings) syntax. In particular, this allows configuration values to be formatted automatically: 282 | 283 | ```python 284 | cfg = config_from_dict({ 285 | "percentage": "{val:.3%}", 286 | "with_sign": "{val:+f}", 287 | "val": 1.23456, 288 | }, interpolate=True) 289 | 290 | assert cfg.val == 1.23456 291 | assert cfg.with_sign == "+1.234560" 292 | assert cfg.percentage == "123.456%" 293 | ``` 294 | 295 | ###### Validation 296 | 297 | Validation relies on the [jsonschema](https://github.com/python-jsonschema/jsonschema) library, which is automatically installed using the extra `validation`.
To use it, call the `validate` method on any `Configuration` instance in a manner similar to what is described in the `jsonschema` documentation: 298 | 299 | ```python 300 | schema = { 301 | "type" : "object", 302 | "properties" : { 303 | "price" : {"type" : "number"}, 304 | "name" : {"type" : "string"}, 305 | }, 306 | } 307 | 308 | cfg = config_from_dict({"name" : "Eggs", "price" : 34.99}) 309 | assert cfg.validate(schema) 310 | 311 | cfg = config_from_dict({"name" : "Eggs", "price" : "Invalid"}) 312 | assert not cfg.validate(schema) 313 | 314 | # pass the `raise_on_error` parameter to get the traceback of validation failures 315 | cfg.validate(schema, raise_on_error=True) 316 | # ValidationError: 'Invalid' is not of type 'number' 317 | ``` 318 | 319 | To use the [format](https://python-jsonschema.readthedocs.io/en/latest/validate/#validating-formats) feature of the `jsonschema` library, the extra dependencies must be installed separately as explained in the documentation of `jsonschema`. 320 | 321 | ```python 322 | from jsonschema import Draft202012Validator 323 | 324 | schema = { 325 | "type" : "object", 326 | "properties" : { 327 | "ip" : {"format" : "ipv4"}, 328 | }, 329 | } 330 | 331 | cfg = config_from_dict({"ip": "10.0.0.1"}) 332 | assert cfg.validate(schema, format_checker=Draft202012Validator.FORMAT_CHECKER) 333 | 334 | cfg = config_from_dict({"ip": "10"}) 335 | assert not cfg.validate(schema, format_checker=Draft202012Validator.FORMAT_CHECKER) 336 | 337 | # with the `raise_on_error` parameter: 338 | cfg.validate(schema, raise_on_error=True, format_checker=Draft202012Validator.FORMAT_CHECKER) 339 | # ValidationError: '10' is not a 'ipv4' 340 | ``` 341 | 342 | 343 | ## Extras 344 | 345 | The `config.contrib` package contains extra implementations of the `Configuration` class used for special cases. Currently the following are implemented (a short sketch combining one of these providers with other configuration sources is shown after the Contributing section below): 346 | 347 | * `AzureKeyVaultConfiguration` in `config.contrib.azure`, which turns Azure Key Vault 348 | credentials into a `Configuration`-compatible instance. To install the needed dependencies 349 | execute 350 | 351 | ```shell 352 | pip install python-configuration[azure] 353 | ``` 354 | 355 | * `AWSSecretsManagerConfiguration` in `config.contrib.aws`, which turns AWS Secrets Manager 356 | credentials into a `Configuration`-compatible instance. To install the needed dependencies 357 | execute 358 | 359 | ```shell 360 | pip install python-configuration[aws] 361 | ``` 362 | 363 | * `GCPSecretManagerConfiguration` in `config.contrib.gcp`, which turns GCP Secret Manager 364 | credentials into a `Configuration`-compatible instance. To install the needed dependencies 365 | execute 366 | 367 | ```shell 368 | pip install python-configuration[gcp] 369 | ``` 370 | 371 | * `HashicorpVaultConfiguration` in `config.contrib.vault`, which turns Hashicorp Vault 372 | credentials into a `Configuration`-compatible instance. To install the needed dependencies 373 | execute 374 | 375 | ```shell 376 | pip install python-configuration[vault] 377 | ``` 378 | 379 | ## Features 380 | 381 | * Load multiple configuration types 382 | * Hierarchical configuration 383 | * Ability to override with environment variables 384 | * Merge parameters from different configuration types 385 | 386 | ## Contributing 387 | 388 | If you'd like to contribute, please fork the repository and use a feature branch. Pull requests are welcome. 389 | 390 | See [`CONTRIBUTING.md`](CONTRIBUTING.md) for the details.
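## Combining Extras with Other Sources

As referenced in the Extras section above, the contrib classes behave like any other `Configuration`, so they can be mixed into a `ConfigurationSet` together with regular sources. The sketch below is illustrative only: it assumes the `aws` extra is installed, and the secret name, prefix, and default value are made up.

```python
from config import ConfigurationSet, config_from_dict, config_from_env
from config.contrib.aws import AWSSecretsManagerConfiguration

cfg = ConfigurationSet(
    config_from_env(prefix="MYAPP"),                     # highest priority
    AWSSecretsManagerConfiguration("myapp/production"),  # hypothetical secret name
    config_from_dict({"database.port": 5432}),           # hard-coded defaults
)

cfg["database.port"]  # resolved from the first source that defines it
```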
391 | 392 | ## Links 393 | 394 | - Repository: https://github.com/tr11/python-configuration 395 | - Issue tracker: https://github.com/tr11/python-configuration/issues 396 | - Documentation: https://python-configuration.readthedocs.io 397 | 398 | ## Licensing 399 | 400 | The code in this project is licensed under MIT license. 401 | -------------------------------------------------------------------------------- /docs/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | ../CONTRIBUTING.md -------------------------------------------------------------------------------- /docs/changelog.md: -------------------------------------------------------------------------------- 1 | ../CHANGELOG.md -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | ../README.md -------------------------------------------------------------------------------- /docs/reference.md: -------------------------------------------------------------------------------- 1 | # Reference 2 | 3 | ::: config 4 | ::: config.configuration 5 | ::: config.configuration_set 6 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Python Configuration Docs 2 | repo_url: "https://github.com/tr11/python-configuration" 3 | repo_name: "tr11/python-configuration" 4 | copyright: Copyright © 2024 Tiago Requeijo 5 | 6 | theme: 7 | name: material 8 | palette: 9 | - media: "(prefers-color-scheme)" 10 | toggle: 11 | icon: material/brightness-auto 12 | name: Switch to light mode 13 | - media: "(prefers-color-scheme: light)" 14 | scheme: default 15 | primary: teal 16 | accent: purple 17 | toggle: 18 | icon: material/weather-sunny 19 | name: Switch to dark mode 20 | - media: "(prefers-color-scheme: dark)" 21 | scheme: slate 22 | primary: black 23 | accent: lime 24 | toggle: 25 | icon: material/weather-night 26 | name: Switch to system preference 27 | 28 | validation: 29 | omitted_files: warn 30 | absolute_links: warn 31 | unrecognized_links: warn 32 | 33 | nav: 34 | - Home: index.md 35 | - Contributing: CONTRIBUTING.md 36 | - Change Log: changelog.md 37 | - Reference: reference.md 38 | 39 | plugins: 40 | - search 41 | - autorefs 42 | - mkdocstrings: 43 | handlers: 44 | python: 45 | paths: [src] 46 | 47 | extra: 48 | social: 49 | - icon: fontawesome/brands/github 50 | link: https://github.com/tr11/python-configuration 51 | - icon: fontawesome/brands/python 52 | link: https://pypi.org/project/python-configuration/ 53 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "python-configuration" 3 | dynamic = ["version"] 4 | description = "A library to load configuration parameters from multiple sources and formats" 5 | readme = 'README.md' 6 | requires-python = ">=3.8" 7 | keywords = ['configuration', 'settings', 'json', 'yaml', 'toml', 'ini'] 8 | license = { text = "MIT" } 9 | authors = [{ name = "Tiago Requeijo", email = "tiago.requeijo.dev@gmail.com" }] 10 | classifiers = [ 11 | "Programming Language :: Python", 12 | "Programming Language :: Python :: 3.8", 13 | "Programming Language :: Python :: 3.9", 14 | "Programming Language :: Python :: 3.10", 15 | "Programming Language :: Python :: 3.11", 16 
| "Programming Language :: Python :: 3.12", 17 | "Programming Language :: Python :: Implementation :: CPython", 18 | "Programming Language :: Python :: Implementation :: PyPy", 19 | ] 20 | dependencies = [] 21 | 22 | [project.urls] 23 | Homepage = "https://tr11.github.io/python-configuration/" 24 | Documentation = "https://tr11.github.io/python-configuration/" 25 | Issues = "https://github.com/tr11/python-configuration/issues" 26 | Source = "https://github.com/tr11/python-configuration" 27 | 28 | [project.optional-dependencies] 29 | # cloud 30 | aws = ["boto3>=1.28.20"] 31 | azure = ["azure-keyvault>=4.2.0", "azure-identity"] 32 | gcp = ["google-cloud-secret-manager>=2.16.3"] 33 | vault = ["hvac>=1.1.1"] 34 | # file formats 35 | toml = ["tomli>=2.0.1"] 36 | yaml = ["pyyaml>=6.0"] 37 | # utilities 38 | validation = ["jsonschema>=4.21.1"] 39 | # groups 40 | cloud = ["python-configuration[aws,azure,gcp,vault]"] 41 | file-formats = ["python-configuration[toml,yaml]"] 42 | 43 | [tool.hatch.version] 44 | source = "vcs" 45 | 46 | [tool.hatch.build.hooks.vcs] 47 | version-file = "src/config/_version.py" 48 | 49 | [tool.hatch.build.targets.wheel] 50 | packages = ["src/config"] 51 | 52 | [tool.hatch.publish.index] 53 | disable = true 54 | 55 | [tool.hatch.envs.default] 56 | 57 | [tool.hatch.envs.docs] 58 | dependencies = [ 59 | "mkdocs>=1.5.3", 60 | "mkdocstrings>=0.24.0", 61 | "mkdocs-material>=9.5.9", 62 | "mkdocstrings-python", 63 | ] 64 | 65 | 66 | [tool.hatch.envs.lint] 67 | detached = true 68 | dependencies = ["mypy>=1.8.0", "pydocstyle>=6.3.0", "ruff>=0.2.2"] 69 | features = ["cloud", "file-formats", "validation"] 70 | 71 | [tool.hatch.envs.lint.scripts] 72 | lint = "ruff check src" 73 | typing = "mypy src" 74 | docs = "pydocstyle src" 75 | 76 | [tool.hatch.envs.test] 77 | template = "lint" 78 | dependencies = [ 79 | "pytest-cov>=4.1.0", 80 | "pytest-mock>=3.12.0", 81 | "pytest-mypy>=0.10.3", 82 | "pytest-ruff>=0.2.1", 83 | "pytest>=8.0.1", 84 | ] 85 | 86 | [tool.hatch.envs.test.scripts] 87 | test = "pytest" 88 | 89 | [tool.hatch.envs.testing] 90 | description = "Testing Environment to run all\nthe tests across different\nPython versions." 
91 | template = "test" 92 | 93 | [[tool.hatch.envs.testing.matrix]] 94 | python = ["3.8", "3.9", "3.10", "3.11", "3.12"] 95 | 96 | [tool.hatch.envs.dev] 97 | template = "test" 98 | path = ".venv" 99 | features = ["cloud", "file-formats", "validation"] 100 | 101 | [tool.ruff] 102 | line-length = 88 103 | exclude = ["docs", "src/config/_version.py"] 104 | 105 | [tool.ruff.lint.pydocstyle] 106 | convention = "google" 107 | 108 | [tool.ruff.lint] 109 | ignore = ['D203', 'D212'] 110 | select = [ 111 | 'F', 112 | 'E', 113 | 'W', 114 | 'I', 115 | 'N', 116 | 'D', 117 | 'B', 118 | 'A', 119 | 'COM', 120 | 'C4', 121 | 'T20', 122 | 'Q', 123 | 'SIM', 124 | ] 125 | 126 | [tool.mypy] 127 | warn_return_any = true 128 | warn_unused_configs = true 129 | disallow_untyped_calls = true 130 | disallow_untyped_defs = true 131 | disallow_subclassing_any = true 132 | disallow_any_decorated = true 133 | disallow_incomplete_defs = true 134 | disallow_untyped_decorators = true 135 | no_implicit_optional = true 136 | warn_unused_ignores = true 137 | warn_redundant_casts = true 138 | exclude = ['tests'] 139 | 140 | [[tool.mypy.overrides]] 141 | module = [ 142 | 'google.auth.credentials', 143 | 'yaml', 144 | 'toml', 145 | 'boto3', 146 | 'botocore.exceptions', 147 | 'hvac', 148 | 'hvac.exceptions', 149 | 'jsonschema', 150 | 'jsonschema.exceptions', 151 | 'azure.identity', 152 | ] 153 | ignore_missing_imports = true 154 | 155 | [tool.coverage.run] 156 | branch = true 157 | include = ['src/*'] 158 | omit = ['src/config/_version.py'] 159 | 160 | [tool.coverage.html] 161 | directory = 'cover' 162 | 163 | [tool.pytest.ini_options] 164 | minversion = "8.0" 165 | pythonpath = ["src"] 166 | addopts = '--cov --cov-report=html --cov-report term-missing --ruff --mypy' 167 | filterwarnings = [ 168 | 'ignore::pytest.PytestDeprecationWarning', 169 | 'ignore::DeprecationWarning', 170 | 'ignore::pytest.PytestWarning', 171 | ] 172 | 173 | [build-system] 174 | requires = ["hatchling", "hatch-vcs"] 175 | build-backend = "hatchling.build" 176 | -------------------------------------------------------------------------------- /src/config/.gitignore: -------------------------------------------------------------------------------- 1 | _version.py 2 | __pycache__ -------------------------------------------------------------------------------- /src/config/configuration.py: -------------------------------------------------------------------------------- 1 | """Configuration class.""" 2 | 3 | import base64 4 | from contextlib import contextmanager 5 | from copy import deepcopy 6 | from sys import version_info 7 | from typing import ( 8 | Any, 9 | Dict, 10 | ItemsView, 11 | Iterator, 12 | KeysView, 13 | List, 14 | Mapping, 15 | Optional, 16 | Tuple, 17 | Union, 18 | ValuesView, 19 | cast, 20 | ) 21 | 22 | from .helpers import ( 23 | AttributeDict, 24 | InterpolateEnumType, 25 | InterpolateType, 26 | as_bool, 27 | clean, 28 | interpolate_object, 29 | ) 30 | 31 | if version_info < (3, 8): # pragma: no cover 32 | from collections import OrderedDict 33 | 34 | 35 | class Configuration: 36 | """Configuration class. 
37 | 38 | The Configuration class takes a dictionary input with keys such as 39 | 40 | - ``a1.b1.c1`` 41 | - ``a1.b1.c2`` 42 | - ``a1.b2.c1`` 43 | - ``a1.b2.c2`` 44 | - ``a2.b1.c1`` 45 | - ``a2.b1.c2`` 46 | - ``a2.b2.c1`` 47 | - ``a2.b2.c2`` 48 | """ 49 | 50 | def __init__( 51 | self, 52 | config_: Mapping[str, Any], 53 | lowercase_keys: bool = False, 54 | interpolate: InterpolateType = False, 55 | interpolate_type: InterpolateEnumType = InterpolateEnumType.STANDARD, 56 | ): 57 | """Class Constructor. 58 | 59 | Params: 60 | config_: a mapping of configuration values. Keys need to be strings. 61 | lowercase_keys: whether to convert every key to lower case. 62 | """ 63 | self._lowercase = lowercase_keys 64 | self._interpolate = {} if interpolate is True else interpolate 65 | self._interpolate_type = interpolate_type 66 | self._config: Dict[str, Any] = self._flatten_dict(config_) 67 | self._default_levels: Optional[int] = 1 68 | 69 | def __eq__(self, other): # type: ignore 70 | """Equality operator.""" 71 | if not isinstance(other, (Configuration, Mapping)): 72 | return False 73 | return self.as_dict() == Configuration(other).as_dict() 74 | 75 | def _filter_dict(self, d: Dict[str, Any], prefix: str) -> Dict[str, Any]: 76 | """Filter a dictionary and return the items that are prefixed by `prefix`. 77 | 78 | Params: 79 | d: dictionary. 80 | prefix: prefix to filter on. 81 | """ 82 | if self._lowercase: 83 | return { 84 | k[(len(prefix) + 1) :].lower(): v 85 | for k, v in d.items() 86 | for k, v in d.items() 87 | if k.startswith(prefix + ".") 88 | } 89 | else: 90 | return { 91 | k[(len(prefix) + 1) :]: v 92 | for k, v in d.items() 93 | if k.startswith(prefix + ".") 94 | } 95 | 96 | def _flatten_dict(self, d: Mapping[str, Any]) -> Dict[str, Any]: 97 | """Flatten one level of a dictionary. 98 | 99 | Params: 100 | d: dict. 101 | 102 | Returns: 103 | a flattened dict. 104 | """ 105 | nested = {k for k, v in d.items() if isinstance(v, (Mapping, Configuration))} 106 | if self._lowercase: 107 | result = { 108 | k.lower() + "." + ki: vi 109 | for k in nested 110 | for ki, vi in self._flatten_dict(d[k]).items() 111 | } 112 | result.update( 113 | (k.lower(), v) 114 | for k, v in d.items() 115 | if not isinstance(v, (Mapping, Configuration)) 116 | ) 117 | else: 118 | result = { 119 | k + "." + ki: vi 120 | for k in nested 121 | for ki, vi in self._flatten_dict(d[k]).items() 122 | } 123 | result.update( 124 | (k, v) 125 | for k, v in d.items() 126 | if not isinstance(v, (Mapping, Configuration)) 127 | ) 128 | return result 129 | 130 | def _get_subset(self, prefix: str) -> Union[Dict[str, Any], Any]: 131 | """Return the subset of the config dictionary whose keys start with `prefix`. 132 | 133 | Params: 134 | prefix: string. 135 | 136 | Returns: 137 | dict. 
138 | """ # noqa: E501 139 | d = { 140 | k[(len(prefix) + 1) :]: v 141 | for k, v in self._config.items() 142 | if k.startswith(prefix + ".") 143 | } 144 | if not d: 145 | prefixes = prefix.split(".") 146 | if len(prefixes) == 1: 147 | return deepcopy(self._config.get(prefix, {})) 148 | d = self._config 149 | while prefixes: # pragma: no branches 150 | p = prefixes[0] 151 | new_d = self._filter_dict(d, p) 152 | if new_d == {}: 153 | return deepcopy(d.get(p, {}) if len(prefixes) == 1 else {}) 154 | d = new_d 155 | prefixes = prefixes[1:] 156 | return deepcopy(d) 157 | 158 | def __getitem__(self, item: str) -> Union["Configuration", Any]: # noqa: D105 159 | v = self._get_subset(item) 160 | 161 | if v == {}: 162 | raise KeyError(item) 163 | if isinstance(v, Mapping): 164 | return Configuration(v) 165 | elif self._interpolate is not False: 166 | d = self.as_dict() 167 | d.update(self._interpolate) 168 | return interpolate_object(item, v, [d], self._interpolate_type) 169 | else: 170 | return v 171 | 172 | def __getattr__(self, item: str) -> Any: # noqa: D105 173 | try: 174 | return self[item] 175 | except KeyError: 176 | raise AttributeError(item) from None 177 | 178 | def get(self, key: str, default: Any = None) -> Union[dict, Any]: 179 | """Get the configuration values corresponding to `key`. 180 | 181 | Params: 182 | key: key to retrieve. 183 | default: default value in case the key is missing. 184 | 185 | Returns: 186 | the value found or a default. 187 | """ 188 | return self.as_dict().get(key, default) 189 | 190 | def as_dict(self) -> dict: 191 | """Return the representation as a dictionary.""" 192 | return self._config 193 | 194 | def as_attrdict(self) -> AttributeDict: 195 | """Return the representation as an attribute dictionary.""" 196 | return AttributeDict( 197 | { 198 | x: Configuration(v).as_attrdict() if isinstance(v, Mapping) else v 199 | for x, v in self.items(levels=1) 200 | }, 201 | ) 202 | 203 | def get_bool(self, item: str) -> bool: 204 | """Get the item value as a bool. 205 | 206 | Params: 207 | item: key 208 | """ 209 | return as_bool(self[item]) 210 | 211 | def get_str(self, item: str, fmt: str = "{}") -> str: 212 | """Get the item value as an int. 213 | 214 | Params: 215 | item: key 216 | fmt: format to use 217 | """ 218 | return fmt.format(self[item]) 219 | 220 | def get_int(self, item: str) -> int: 221 | """Get the item value as an int. 222 | 223 | Params: 224 | item: key 225 | """ 226 | return int(self[item]) 227 | 228 | def get_float(self, item: str) -> float: 229 | """Get the item value as a float. 230 | 231 | Params: 232 | item: key 233 | """ 234 | return float(self[item]) 235 | 236 | def get_list(self, item: str) -> List[Any]: 237 | """Get the item value as a list. 238 | 239 | Params: 240 | item: key 241 | """ 242 | return list(self[item]) 243 | 244 | def get_dict(self, item: str) -> dict: 245 | """Get the item values as a dictionary. 246 | 247 | Params: 248 | item: key 249 | """ 250 | return dict(self._get_subset(item)) 251 | 252 | def base64encode(self, item: str) -> bytes: 253 | """Get the item value as a Base64 encoded bytes instance. 254 | 255 | Params: 256 | item: key 257 | """ 258 | b = self[item] 259 | b = b if isinstance(b, bytes) else b.encode() 260 | return base64.b64encode(b) 261 | 262 | def base64decode(self, item: str) -> bytes: 263 | """Get the item value as a Base64 decoded bytes instance. 
264 | 265 | Params: 266 | item: key 267 | """ 268 | b = self[item] 269 | b = b if isinstance(b, bytes) else b.encode() 270 | return base64.b64decode(b, validate=True) 271 | 272 | def keys( 273 | self, 274 | levels: Optional[int] = None, 275 | ) -> Union["Configuration", Any, KeysView[str]]: 276 | """Return a set-like object providing a view on the configuration keys.""" 277 | assert levels is None or levels > 0 278 | levels = self._default_levels if levels is None else levels 279 | try: 280 | return self["keys"] # don't filter levels, existing attribute 281 | except KeyError: 282 | return cast( 283 | KeysView[str], 284 | list( 285 | { 286 | ".".join(x.split(".")[:levels]) 287 | for x in set(self.as_dict().keys()) 288 | }, 289 | ), 290 | ) 291 | 292 | def values( 293 | self, 294 | levels: Optional[int] = None, 295 | ) -> Union["Configuration", Any, ValuesView[Any]]: 296 | """Return a set-like object providing a view on the configuration values.""" 297 | assert levels is None or levels > 0 298 | levels = self._default_levels if levels is None else levels 299 | try: 300 | return self["values"] 301 | except KeyError: 302 | return dict(self.items(levels=levels)).values() 303 | 304 | def items( 305 | self, 306 | levels: Optional[int] = None, 307 | ) -> Union["Configuration", Any, ItemsView[str, Any]]: 308 | """Return a set-like object providing a view on the configuration items.""" 309 | assert levels is None or levels > 0 310 | levels = self._default_levels if levels is None else levels 311 | try: 312 | return self["items"] 313 | except KeyError: 314 | keys = cast(KeysView[str], self.keys(levels=levels)) 315 | return {k: self._get_subset(k) for k in keys}.items() 316 | 317 | def __iter__(self) -> Iterator[Tuple[str, Any]]: # noqa: D105 318 | return iter(dict(self.items())) # type: ignore 319 | 320 | def __reversed__(self) -> Iterator[Tuple[str, Any]]: # noqa: D105 321 | if version_info < (3, 8): 322 | return OrderedDict( 323 | reversed(list(self.items())), 324 | ) # type: ignore # pragma: no cover 325 | else: 326 | return reversed(dict(self.items())) # type: ignore 327 | 328 | def __len__(self) -> int: # noqa: D105 329 | return len(self.keys()) 330 | 331 | def __setitem__(self, key: str, value: Any) -> None: # noqa: D105 332 | self.update({key: value}) 333 | 334 | def __delitem__(self, prefix: str) -> None: # noqa: D105 335 | """Filter a dictionary and delete the items that are prefixed by `prefix`. 336 | 337 | Params: 338 | prefix: prefix to filter on to delete keys 339 | """ 340 | remove = [] 341 | for k in self._config: 342 | kl = k.lower() if self._lowercase else k 343 | if kl == prefix or kl.startswith(prefix + "."): 344 | remove.append(k) 345 | if not remove: 346 | raise KeyError("No key with prefix '%s' found." % prefix) 347 | for k in remove: 348 | del self._config[k] 349 | 350 | def __contains__(self, prefix: str) -> bool: # noqa: D105 351 | try: 352 | self[prefix] 353 | return True 354 | except KeyError: 355 | return False 356 | 357 | def clear(self) -> None: 358 | """Remove all items.""" 359 | self._config.clear() 360 | 361 | def copy(self) -> "Configuration": 362 | """Return shallow copy.""" 363 | return Configuration(self._config) 364 | 365 | def pop(self, prefix: str, value: Any = None) -> Any: 366 | """Remove keys with the specified prefix and return the corresponding value. 367 | 368 | If the prefix is not found a KeyError is raised. 
369 | """ 370 | try: 371 | value = self[prefix] 372 | del self[prefix] 373 | except KeyError: 374 | if value is None: 375 | raise 376 | return value 377 | 378 | def setdefault(self, key: str, default: Any = None) -> Any: 379 | """Insert key with a value of default if key is not in the Configuration. 380 | 381 | Return the value for key if key is in the Configuration, else default. 382 | """ 383 | try: 384 | return self[key] 385 | except KeyError: 386 | self[key] = default 387 | return self[key] 388 | 389 | def update(self, other: Mapping[str, Any]) -> None: 390 | """Update the Configuration with another Configuration object or Mapping.""" 391 | self._config.update(self._flatten_dict(other)) 392 | 393 | def reload(self) -> None: # pragma: no cover 394 | """Reload the configuration. 395 | 396 | This method is not implemented for simple Configuration objects and is 397 | intended only to be used in subclasses. 398 | """ 399 | raise NotImplementedError() 400 | 401 | def validate( 402 | self, 403 | schema: Any, 404 | raise_on_error: bool = False, 405 | nested: bool = False, 406 | **kwargs: Mapping[str, Any], 407 | ) -> bool: 408 | """Validate the current config using JSONSchema.""" 409 | try: 410 | from jsonschema import ValidationError, validate 411 | except ImportError: # pragma: no cover 412 | raise RuntimeError( 413 | "Validation requires the `jsonschema` library.", 414 | ) from None 415 | try: 416 | validate(self.as_attrdict() if nested else self.as_dict(), schema, **kwargs) 417 | except ValidationError as err: 418 | if raise_on_error: 419 | raise err 420 | return False 421 | return True 422 | 423 | @contextmanager 424 | def dotted_iter(self) -> Iterator["Configuration"]: 425 | """ 426 | Context manager for dotted iteration. 427 | 428 | This context manager changes all the iterator-related functions 429 | to include every nested (dotted) key instead of just the top level. 430 | """ 431 | self._default_levels = None 432 | try: 433 | yield self 434 | finally: 435 | self._default_levels = 1 436 | 437 | def __repr__(self) -> str: # noqa: D105 438 | return "<%s: %s>" % (type(self).__name__, hex(id(self))) 439 | 440 | def __str__(self) -> str: # noqa: D105 441 | return str({k: clean(k, v) for k, v in sorted(self.as_dict().items())}) 442 | -------------------------------------------------------------------------------- /src/config/configuration_set.py: -------------------------------------------------------------------------------- 1 | """ConfigurationSet class.""" 2 | 3 | import contextlib 4 | from typing import ( 5 | Any, 6 | ItemsView, 7 | Iterable, 8 | KeysView, 9 | List, 10 | Mapping, 11 | Optional, 12 | Union, 13 | ValuesView, 14 | ) 15 | 16 | from .configuration import Configuration 17 | from .helpers import InterpolateEnumType, InterpolateType, clean, interpolate_object 18 | 19 | 20 | class ConfigurationSet(Configuration): 21 | """Configuration Sets. 22 | 23 | A class that combines multiple [Configuration][config.configuration.Configuration] 24 | instances in a hierarchical manner. 
25 | """ 26 | 27 | def __init__( 28 | self, 29 | *configs: Configuration, 30 | interpolate: InterpolateType = False, 31 | interpolate_type: InterpolateEnumType = InterpolateEnumType.STANDARD, 32 | ): # noqa: D107 33 | """Class Constructor.""" 34 | self._interpolate = {} if interpolate is True else interpolate 35 | self._interpolate_type = interpolate_type 36 | try: 37 | self._configs: List[Configuration] = list(configs) 38 | except Exception: # pragma: no cover 39 | raise ValueError( 40 | "configs should be a non-empty iterable of Configuration objects", 41 | ) from None 42 | if not self._configs: # pragma: no cover 43 | raise ValueError( 44 | "configs should be a non-empty iterable of Configuration objects", 45 | ) 46 | if not all( 47 | isinstance(x, Configuration) for x in self._configs 48 | ): # pragma: no cover 49 | raise ValueError( 50 | "configs should be a non-empty iterable of Configuration objects", 51 | ) 52 | self._writable = False 53 | self._default_levels = 1 54 | 55 | def _from_configs(self, attr: str, *args: Any, **kwargs: dict) -> Any: 56 | last_err = Exception() 57 | values = [] 58 | for config_ in self._configs: 59 | try: 60 | values.append(getattr(config_, attr)(*args, **kwargs)) 61 | except Exception as err: 62 | last_err = err 63 | continue 64 | if not values: 65 | # raise the last error 66 | raise last_err 67 | if all(isinstance(v, Configuration) for v in values): 68 | result: dict = {} 69 | for v in values[::-1]: 70 | result.update(v.as_dict()) 71 | return Configuration(result) 72 | elif isinstance(values[0], Configuration): 73 | result = {} 74 | for v in values[::-1]: 75 | if not isinstance(v, Configuration): 76 | continue 77 | result.update(v) 78 | return Configuration(result) 79 | elif self._interpolate is not False: 80 | d = [d.as_dict() for d in self._configs] 81 | d[0].update(self._interpolate) 82 | return interpolate_object(args[0], values[0], d, self._interpolate_type) 83 | else: 84 | return values[0] 85 | 86 | def _writable_config(self) -> Configuration: 87 | if not self._writable: 88 | lowercase = bool(self._configs and self._configs[0]._lowercase) 89 | self._configs.insert(0, Configuration({}, lowercase_keys=lowercase)) 90 | self._writable = True 91 | return self._configs[0] 92 | 93 | @property 94 | def configs(self) -> List[Configuration]: 95 | """List of underlying configuration objects.""" 96 | if self._writable: 97 | return self._configs[1:] 98 | else: 99 | return list(self._configs) 100 | 101 | @configs.setter 102 | def configs(self, iterable: Iterable[Configuration]) -> None: 103 | if self._writable: 104 | self._configs = [self._configs[0]] + list(iterable) 105 | else: 106 | self._configs = list(iterable) 107 | 108 | def __getitem__(self, item: str) -> Union[Configuration, Any]: # noqa: D105 109 | return self._from_configs("__getitem__", item) 110 | 111 | def __getattr__(self, item: str) -> Union[Configuration, Any]: # noqa: D105 112 | return self._from_configs("__getattr__", item) 113 | 114 | def get(self, key: str, default: Any = None) -> Union[dict, Any]: 115 | """Get the configuration values corresponding to `key`. 116 | 117 | Params: 118 | key: key to retrieve. 119 | default: default value in case the key is missing. 
120 | 121 | Returns: 122 | the value found or a default 123 | """ 124 | try: 125 | return self[key] 126 | except Exception: 127 | return default 128 | 129 | def as_dict(self) -> dict: 130 | """Return the representation as a dictionary.""" 131 | result = {} 132 | for config_ in self._configs[::-1]: 133 | result.update(config_.as_dict()) 134 | return result 135 | 136 | def get_dict(self, item: str) -> dict: 137 | """Get the item values as a dictionary. 138 | 139 | item: key 140 | """ 141 | return Configuration(dict(dict(self[item]).items())).as_dict() 142 | 143 | def keys( 144 | self, 145 | levels: Optional[int] = None, 146 | ) -> Union["Configuration", Any, KeysView[str]]: 147 | """Return a set-like object providing a view on the configuration keys.""" 148 | if self._default_levels: 149 | return Configuration(self.as_dict()).keys(levels or self._default_levels) 150 | with Configuration(self.as_dict()).dotted_iter() as cfg: 151 | return cfg.keys(levels) 152 | 153 | def values( 154 | self, 155 | levels: Optional[int] = None, 156 | ) -> Union["Configuration", Any, ValuesView[Any]]: 157 | """Return a set-like object providing a view on the configuration values.""" 158 | if self._default_levels: 159 | return Configuration(self.as_dict()).values(levels or self._default_levels) 160 | with Configuration(self.as_dict()).dotted_iter() as cfg: 161 | return cfg.values(levels) 162 | 163 | def items( 164 | self, 165 | levels: Optional[int] = None, 166 | ) -> Union["Configuration", Any, ItemsView[str, Any]]: 167 | """Return a set-like object providing a view on the configuration items.""" 168 | if self._default_levels: 169 | return Configuration(self.as_dict()).items(levels or self._default_levels) 170 | with Configuration(self.as_dict()).dotted_iter() as cfg: 171 | return cfg.items(levels) 172 | 173 | def __setitem__(self, key: str, value: Any) -> None: # noqa: D105 174 | cfg = self._writable_config() 175 | cfg[key] = value 176 | 177 | def __delitem__(self, prefix: str) -> None: # noqa: D105 178 | removed = False 179 | for cfg in self._configs: 180 | try: 181 | del cfg[prefix] 182 | removed = True 183 | except KeyError: 184 | continue 185 | if not removed: 186 | raise KeyError() 187 | 188 | def __contains__(self, prefix: str) -> bool: # noqa: D105 189 | return any(prefix in cfg for cfg in self._configs) 190 | 191 | def clear(self) -> None: 192 | """Remove all items.""" 193 | for cfg in self._configs: 194 | cfg.clear() 195 | 196 | def copy(self) -> "Configuration": 197 | """Return shallow copy.""" 198 | return ConfigurationSet(*self._configs) 199 | 200 | def update(self, other: Mapping[str, Any]) -> None: 201 | """Update the ConfigurationSet with another Configuration object or Mapping.""" 202 | cfg = self._writable_config() 203 | cfg.update(other) 204 | 205 | def reload(self) -> None: 206 | """Reload the underlying configuration instances.""" 207 | for cfg in self._configs: 208 | with contextlib.suppress(NotImplementedError): 209 | cfg.reload() 210 | 211 | def __repr__(self) -> str: # noqa: D105 212 | return "" % hex(id(self)) 213 | 214 | def __str__(self) -> str: # noqa: D105 215 | return str({k: clean(k, v) for k, v in sorted(self.as_dict().items())}) 216 | -------------------------------------------------------------------------------- /src/config/contrib/__init__.py: -------------------------------------------------------------------------------- 1 | """Extra Configuration implementations.""" 2 | -------------------------------------------------------------------------------- 
/src/config/contrib/aws.py: -------------------------------------------------------------------------------- 1 | """Configuration instances from AWS Secrets Manager.""" 2 | 3 | import json 4 | import time 5 | from typing import Any, Dict, Optional 6 | 7 | import boto3 8 | from botocore.exceptions import ClientError 9 | 10 | from .. import Configuration, InterpolateType 11 | 12 | 13 | class Cache: 14 | """Cache class.""" 15 | 16 | def __init__(self, value: Dict[str, Any], ts: float): # noqa: D107 17 | self.value = value 18 | self.ts = ts 19 | 20 | 21 | class AWSSecretsManagerConfiguration(Configuration): 22 | """AWS Configuration class. 23 | 24 | The AWS Configuration class takes AWS Secrets Manager credentials and 25 | behaves like a drop-in replacement for the regular Configuration class. 26 | """ 27 | 28 | def __init__( 29 | self, 30 | secret_name: str, 31 | aws_access_key_id: Optional[str] = None, 32 | aws_secret_access_key: Optional[str] = None, 33 | aws_session_token: Optional[str] = None, 34 | region_name: Optional[str] = None, 35 | profile_name: Optional[str] = None, 36 | cache_expiration: int = 5 * 60, 37 | lowercase_keys: bool = False, 38 | interpolate: InterpolateType = False, 39 | ) -> None: 40 | """Class Constructor. 41 | 42 | secret_name: Name of the secret 43 | aws_access_key_id: AWS Access Key ID 44 | aws_secret_access_key: AWS Secret Access Key 45 | aws_session_token: AWS Temporary Session Token 46 | region_name: Region Name 47 | profile_name: Profile Name 48 | cache_expiration: Cache expiration (in seconds) 49 | lowercase_keys: whether to convert every key to lower case. 50 | """ 51 | self._session = boto3.session.Session( 52 | aws_access_key_id=aws_access_key_id, 53 | aws_secret_access_key=aws_secret_access_key, 54 | aws_session_token=aws_session_token, 55 | region_name=region_name, 56 | profile_name=profile_name, 57 | ) 58 | self._client = self._session.client(service_name="secretsmanager") 59 | self._secret_name = secret_name 60 | self._secret: Cache = Cache({}, 0) 61 | self._expiration: float = cache_expiration 62 | self._lowercase = lowercase_keys 63 | self._interpolate = {} if interpolate is True else interpolate 64 | self._default_levels = None 65 | 66 | @property 67 | def _config(self) -> Dict[str, Any]: # type: ignore 68 | now = time.time() 69 | if self._secret.ts + self._expiration > now: 70 | return self._secret.value 71 | try: 72 | get_secret_value_response = self._client.get_secret_value( 73 | SecretId=self._secret_name, 74 | ) 75 | except ClientError as e: # pragma: no cover 76 | if e.response["Error"]["Code"] == "DecryptionFailureException": 77 | # Secrets Manager can't decrypt the protected secret text using 78 | # the provided KMS key. 79 | # Deal with the exception here, and/or rethrow at your discretion. 80 | raise RuntimeError("Cannot read the AWS secret") from None 81 | elif e.response["Error"]["Code"] == "InternalServiceErrorException": 82 | # An error occurred on the server side. 83 | # Deal with the exception here, and/or rethrow at your discretion. 84 | raise RuntimeError("Cannot read the AWS secret") from None 85 | elif e.response["Error"]["Code"] == "InvalidParameterException": 86 | # You provided an invalid value for a parameter. 87 | # Deal with the exception here, and/or rethrow at your discretion. 88 | raise RuntimeError("Cannot read the AWS secret") from None 89 | elif e.response["Error"]["Code"] == "InvalidRequestException": 90 | # You provided a parameter value that is not valid for the current 91 | # state of the resource. 
92 | # Deal with the exception here, and/or rethrow at your discretion. 93 | raise RuntimeError("Cannot read the AWS secret") from None 94 | elif e.response["Error"]["Code"] == "ResourceNotFoundException": 95 | # We can't find the resource that you asked for. 96 | # Deal with the exception here, and/or rethrow at your discretion. 97 | raise RuntimeError("Cannot read the AWS secret") from None 98 | else: 99 | # Decrypts secret using the associated KMS CMK. 100 | # Depending on whether the secret is a string or binary, one of these 101 | # fields will be populated. 102 | if "SecretString" in get_secret_value_response: 103 | secret: str = get_secret_value_response["SecretString"] 104 | else: 105 | raise ValueError("Binary AWS secrets are not supported.") 106 | 107 | self._secret = Cache(json.loads(secret), now) 108 | return self._secret.value 109 | 110 | def reload(self) -> None: 111 | """Reload the configuration.""" 112 | self._secret = Cache({}, 0) 113 | 114 | def __repr__(self) -> str: # noqa: D105 115 | return "" % self._secret_name 116 | -------------------------------------------------------------------------------- /src/config/contrib/azure.py: -------------------------------------------------------------------------------- 1 | """Configuration instances from Azure KeyVaults.""" 2 | 3 | import time 4 | from typing import Any, Dict, ItemsView, KeysView, Optional, Union, ValuesView, cast 5 | 6 | from azure.core.exceptions import ResourceNotFoundError 7 | from azure.identity import ClientSecretCredential 8 | from azure.keyvault.secrets import SecretClient 9 | 10 | from .. import Configuration, InterpolateType 11 | 12 | 13 | class Cache: 14 | """Cache class.""" 15 | 16 | def __init__(self, value: Optional[str], ts: float): # noqa: D107 17 | self.value = value 18 | self.ts = ts 19 | 20 | 21 | class AzureKeyVaultConfiguration(Configuration): 22 | """Azure Configuration class. 23 | 24 | The Azure Configuration class takes Azure KeyVault credentials and 25 | behaves like a drop-in replacement for the regular Configuration class. 26 | 27 | The following limitations apply to the Azure KeyVault Configurations: 28 | - keys must conform to the pattern '^[0-9a-zA-Z-]+$'. In particular, 29 | there is no support for levels and nested values as there are no 30 | natural key separators for the pattern above. 31 | - values must be strings. 32 | """ 33 | 34 | def __init__( 35 | self, 36 | az_client_id: str, 37 | az_client_secret: str, 38 | az_tenant_id: str, 39 | az_vault_name: str, 40 | cache_expiration: int = 5 * 60, 41 | interpolate: InterpolateType = False, 42 | ) -> None: 43 | """Class Constructor. 
44 | 45 | az_client_id: Client ID 46 | az_client_secret: Client Secret 47 | az_tenant_id: Tenant ID 48 | az_vault_name: Vault Name 49 | cache_expiration: Cache expiration (in seconds) 50 | """ 51 | credentials = ClientSecretCredential( 52 | client_id=az_client_id, 53 | client_secret=az_client_secret, 54 | tenant_id=az_tenant_id, 55 | ) 56 | vault_url = f"https://{az_vault_name}.vault.azure.net/" 57 | self._kv_client = SecretClient(vault_url=vault_url, credential=credentials) 58 | self._cache_expiration = cache_expiration 59 | self._cache: Dict[str, Cache] = {} 60 | self._interpolate = {} if interpolate is True else interpolate 61 | self._default_levels = None 62 | 63 | def _get_secret(self, key: str) -> Optional[str]: 64 | key = key.replace("_", "-") # Normalize for Azure KeyVault 65 | now = time.time() 66 | from_cache = self._cache.get(key) 67 | if from_cache and from_cache.ts + self._cache_expiration > now: 68 | return from_cache.value 69 | try: 70 | secret = self._kv_client.get_secret(key) 71 | self._cache[key] = Cache(value=secret.value, ts=now) 72 | return secret.value 73 | except ResourceNotFoundError: 74 | if key in self._cache: 75 | del self._cache[key] 76 | return None 77 | 78 | def __getitem__(self, item: str) -> Any: # noqa: D105 79 | secret = self._get_secret(item) 80 | if secret is None: 81 | raise KeyError(item) 82 | else: 83 | return secret 84 | 85 | def __getattr__(self, item: str) -> Any: # noqa: D105 86 | secret = self._get_secret(item) 87 | if secret is None: 88 | raise AttributeError(item) 89 | else: 90 | return secret 91 | 92 | def get(self, key: str, default: Any = None) -> Union[dict, Any]: 93 | """Get the configuration values corresponding to `key`. 94 | 95 | Params: 96 | key: key to retrieve. 97 | default: default value in case the key is missing. 98 | 99 | Returns: 100 | the value found or a default. 
101 | """ 102 | secret = self._get_secret(key) 103 | if secret is None: 104 | return default 105 | else: 106 | return secret 107 | 108 | def keys( 109 | self, 110 | levels: Optional[int] = None, 111 | ) -> Union["Configuration", Any, KeysView[str]]: 112 | """Return a set-like object providing a view on the configuration keys.""" 113 | assert not levels # Azure Key Vaults don't support separators 114 | return cast( 115 | KeysView[str], 116 | (k.name for k in self._kv_client.list_properties_of_secrets()), 117 | ) 118 | 119 | def values( 120 | self, 121 | levels: Optional[int] = None, 122 | ) -> Union["Configuration", Any, ValuesView[Any]]: 123 | """Return a set-like object providing a view on the configuration values.""" 124 | assert not levels # Azure Key Vaults don't support separators 125 | return cast( 126 | ValuesView[str], 127 | ( 128 | self._get_secret(cast(str, k.name)) 129 | for k in self._kv_client.list_properties_of_secrets() 130 | ), 131 | ) 132 | 133 | def items( 134 | self, 135 | levels: Optional[int] = None, 136 | ) -> Union["Configuration", Any, ItemsView[str, Any]]: 137 | """Return a set-like object providing a view on the configuration items.""" 138 | assert not levels # Azure Key Vaults don't support separators 139 | return cast( 140 | ItemsView[str, Any], 141 | ( 142 | (k.name, self._get_secret(cast(str, k.name))) 143 | for k in self._kv_client.list_properties_of_secrets() 144 | ), 145 | ) 146 | 147 | def reload(self) -> None: 148 | """Reload the configuration.""" 149 | self._cache.clear() 150 | 151 | def __repr__(self) -> str: # noqa: D105 152 | return f"" 153 | 154 | @property 155 | def _config(self) -> Dict[str, Any]: # type: ignore 156 | return dict(self.items()) 157 | -------------------------------------------------------------------------------- /src/config/contrib/gcp.py: -------------------------------------------------------------------------------- 1 | """Configuration instances from GCP Secret Manager.""" 2 | 3 | import time 4 | from typing import Any, Dict, ItemsView, KeysView, Optional, Union, ValuesView, cast 5 | 6 | from google.api_core.client_options import ClientOptions 7 | from google.api_core.exceptions import NotFound 8 | from google.auth.credentials import Credentials 9 | from google.cloud import secretmanager_v1 10 | 11 | from .. import Configuration, InterpolateType 12 | 13 | 14 | class Cache: 15 | """Cache class.""" 16 | 17 | def __init__(self, value: str, ts: float): # noqa: D107 18 | self.value = value 19 | self.ts = ts 20 | 21 | 22 | class GCPSecretManagerConfiguration(Configuration): 23 | """GCP Secret Manager Configuration class. 24 | 25 | The GCP Secret Manager Configuration class takes GCP Secret Manager credentials and 26 | behaves like a drop-in replacement for the regular Configuration class. 27 | 28 | The following limitations apply to the GCP Secret Manager Configurations: 29 | - keys must conform to the pattern '^[0-9a-zA-Z-_]+$'. In particular, 30 | there is no support for levels and nested values as there are no 31 | natural key separators for the pattern above. 32 | - values must be strings. 33 | """ 34 | 35 | def __init__( 36 | self, 37 | project_id: str, 38 | credentials: Optional[Credentials] = None, 39 | client_options: Optional[ClientOptions] = None, 40 | cache_expiration: int = 5 * 60, 41 | interpolate: InterpolateType = False, 42 | ) -> None: 43 | """Class Constructor. 
44 | 45 | See https://googleapis.dev/python/secretmanager/latest/gapic/v1/api.html#google.cloud.secretmanager_v1.SecretManagerServiceClient 46 | for more details on credentials and options. 47 | 48 | project_id: GCP Project ID 49 | credentials: GCP credentials 50 | client_options: GCP client_options 51 | cache_expiration: Cache expiration (in seconds) 52 | """ # noqa: E501 53 | self._client = secretmanager_v1.SecretManagerServiceClient( 54 | credentials=credentials, 55 | client_options=client_options, 56 | ) 57 | self._project_id = project_id 58 | self._parent = f"projects/{project_id}" 59 | self._cache_expiration = cache_expiration 60 | self._cache: Dict[str, Cache] = {} 61 | self._interpolate = {} if interpolate is True else interpolate 62 | self._default_levels = None 63 | 64 | def _get_secret(self, key: str) -> Optional[str]: 65 | now = time.time() 66 | from_cache = self._cache.get(key) 67 | if from_cache and from_cache.ts + self._cache_expiration > now: 68 | return from_cache.value 69 | try: 70 | path = f"projects/{self._project_id}/secrets/{key}/versions/latest" 71 | secret = self._client.access_secret_version( 72 | request={"name": path}, 73 | ).payload.data.decode() 74 | self._cache[key] = Cache(value=secret, ts=now) 75 | return secret 76 | except NotFound: 77 | if key in self._cache: 78 | del self._cache[key] 79 | return None 80 | 81 | def __getitem__(self, item: str) -> Any: # noqa: D105 82 | secret = self._get_secret(item) 83 | if secret is None: 84 | raise KeyError(item) 85 | else: 86 | return secret 87 | 88 | def __getattr__(self, item: str) -> Any: # noqa: D105 89 | secret = self._get_secret(item) 90 | if secret is None: 91 | raise AttributeError(item) 92 | else: 93 | return secret 94 | 95 | def get(self, key: str, default: Any = None) -> Union[dict, Any]: 96 | """Get the configuration values corresponding to `key`. 97 | 98 | Params: 99 | key: key to retrieve. 100 | default: default value in case the key is missing. 101 | 102 | Returns: 103 | the value found or a default. 
104 | """ 105 | secret = self._get_secret(key) 106 | if secret is None: 107 | return default 108 | else: 109 | return secret 110 | 111 | def keys( 112 | self, 113 | levels: Optional[int] = None, 114 | ) -> Union["Configuration", Any, KeysView[str]]: 115 | """Return a set-like object providing a view on the configuration keys.""" 116 | assert not levels # GCP Secret Manager secrets don't support separators 117 | return cast( 118 | KeysView[str], 119 | ( 120 | k.name.split("/")[-1] 121 | for k in self._client.list_secrets(request={"parent": self._parent}) 122 | ), 123 | ) 124 | 125 | def values( 126 | self, 127 | levels: Optional[int] = None, 128 | ) -> Union["Configuration", Any, ValuesView[Any]]: 129 | """Return a set-like object providing a view on the configuration values.""" 130 | assert not levels # GCP Secret Manager secrets don't support separators 131 | return cast( 132 | ValuesView[str], 133 | ( 134 | self._get_secret(k.name.split("/")[-1]) 135 | for k in self._client.list_secrets(request={"parent": self._parent}) 136 | ), 137 | ) 138 | 139 | def items( 140 | self, 141 | levels: Optional[int] = None, 142 | ) -> Union["Configuration", Any, ItemsView[str, Any]]: 143 | """Return a set-like object providing a view on the configuration items.""" 144 | assert not levels # GCP Secret Manager secrets don't support separators 145 | return cast( 146 | ItemsView[str, Any], 147 | ( 148 | (k.name.split("/")[-1], self._get_secret(k.name.split("/")[-1])) 149 | for k in self._client.list_secrets(request={"parent": self._parent}) 150 | ), 151 | ) 152 | 153 | def reload(self) -> None: 154 | """Reload the configuration.""" 155 | self._cache.clear() 156 | 157 | def __repr__(self) -> str: # noqa: D105 158 | return "" % self._project_id 159 | 160 | @property 161 | def _config(self) -> Dict[str, Any]: # type: ignore 162 | return dict(self.items()) 163 | -------------------------------------------------------------------------------- /src/config/contrib/vault.py: -------------------------------------------------------------------------------- 1 | """Configuration instances from Hashicorp Vault.""" 2 | 3 | import time 4 | from typing import ( 5 | Any, 6 | Dict, 7 | ItemsView, 8 | KeysView, 9 | Mapping, 10 | Optional, 11 | Union, 12 | ValuesView, 13 | cast, 14 | ) 15 | 16 | import hvac 17 | from hvac.exceptions import InvalidPath 18 | 19 | from .. import Configuration, InterpolateType, config_from_dict 20 | 21 | 22 | class Cache: 23 | """Cache class.""" 24 | 25 | def __init__(self, value: Dict[str, Any], ts: float): # noqa: D107 26 | self.value = value 27 | self.ts = ts 28 | 29 | 30 | class HashicorpVaultConfiguration(Configuration): 31 | """Hashicorp Vault Configuration class. 32 | 33 | The Hashicorp Vault Configuration class takes Vault credentials and 34 | behaves like a drop-in replacement for the regular Configuration class. 35 | 36 | The following limitations apply to the Hashicorp Vault Configurations: 37 | - only works with KV version 2 38 | - only supports the latest secret version 39 | - assumes that secrets are named as // 40 | """ 41 | 42 | def __init__( 43 | self, 44 | engine: str, 45 | cache_expiration: int = 5 * 60, 46 | interpolate: InterpolateType = False, 47 | **kwargs: Mapping[str, Any], 48 | ) -> None: 49 | """Class Constructor. 50 | 51 | See https://developer.hashicorp.com/vault/docs/get-started/developer-qs. 
52 | """ # noqa: E501 53 | self._client = hvac.Client(**kwargs) 54 | self._cache_expiration = cache_expiration 55 | self._cache: Dict[str, Cache] = {} 56 | self._engine = engine 57 | self._interpolate = {} if interpolate is True else interpolate 58 | self._default_levels = None 59 | 60 | def _get_secret(self, secret: str) -> Optional[Dict[str, Any]]: 61 | now = time.time() 62 | from_cache = self._cache.get(secret) 63 | if from_cache and from_cache.ts + self._cache_expiration > now: 64 | return from_cache.value 65 | try: 66 | data = cast( 67 | Dict[str, Any], 68 | self._client.kv.v2.read_secret(secret, mount_point=self._engine)[ 69 | "data" 70 | ]["data"], 71 | ) 72 | self._cache[secret] = Cache(value=data, ts=now) 73 | return data 74 | except (InvalidPath, KeyError): 75 | if secret in self._cache: 76 | del self._cache[secret] 77 | return None 78 | 79 | def __getitem__(self, item: str) -> Any: # noqa: D105 80 | path, *rest = item.split(".", 1) 81 | secret = self._get_secret(path) 82 | if secret is None: 83 | raise KeyError(item) 84 | else: 85 | return ( 86 | Configuration(secret)[".".join(rest)] if rest else Configuration(secret) 87 | ) 88 | 89 | def __getattr__(self, item: str) -> Any: # noqa: D105 90 | secret = self._get_secret(item) 91 | if secret is None: 92 | raise AttributeError(item) 93 | else: 94 | return Configuration(secret) 95 | 96 | def get(self, key: str, default: Any = None) -> Union[dict, Any]: 97 | """Get the configuration values corresponding to `key`. 98 | 99 | Params: 100 | key: key to retrieve. 101 | default: default value in case the key is missing. 102 | 103 | Returns: 104 | the value found or a default. 105 | """ 106 | try: 107 | return self[key] 108 | except KeyError: 109 | return default 110 | 111 | def keys( 112 | self, 113 | levels: Optional[int] = None, 114 | ) -> Union["Configuration", Any, KeysView[str]]: 115 | """Return a set-like object providing a view on the configuration keys.""" 116 | assert not levels # Vault secrets don't support separators 117 | return cast( 118 | KeysView[str], 119 | self._client.list(f"/{self._engine}/metadata")["data"]["keys"], 120 | ) 121 | 122 | def values( 123 | self, 124 | levels: Optional[int] = None, 125 | ) -> Union["Configuration", Any, ValuesView[Any]]: 126 | """Return a set-like object providing a view on the configuration values.""" 127 | assert not levels # GCP Secret Manager secrets don't support separators 128 | return cast( 129 | ValuesView[str], 130 | ( 131 | self._get_secret(k) 132 | for k in self._client.list(f"/{self._engine}/metadata")["data"]["keys"] 133 | ), 134 | ) 135 | 136 | def items( 137 | self, 138 | levels: Optional[int] = None, 139 | ) -> Union["Configuration", Any, ItemsView[str, Any]]: 140 | """Return a set-like object providing a view on the configuration items.""" 141 | assert not levels # GCP Secret Manager secrets don't support separators 142 | return cast( 143 | ItemsView[str, Any], 144 | ( 145 | (k, self._get_secret(k)) 146 | for k in self._client.list(f"/{self._engine}/metadata")["data"]["keys"] 147 | ), 148 | ) 149 | 150 | def reload(self) -> None: 151 | """Reload the configuration.""" 152 | self._cache.clear() 153 | 154 | def __repr__(self) -> str: # noqa: D105 155 | return "" % self._engine 156 | 157 | @property 158 | def _config(self) -> Dict[str, Any]: # type: ignore 159 | return config_from_dict(dict(self.items()))._config 160 | -------------------------------------------------------------------------------- /src/config/helpers.py: 
-------------------------------------------------------------------------------- 1 | """Helper functions.""" 2 | 3 | import string 4 | from enum import Enum 5 | from typing import Any, Dict, List, Set, Tuple, Union 6 | 7 | TRUTH_TEXT = frozenset(("t", "true", "y", "yes", "on", "1")) 8 | FALSE_TEXT = frozenset(("f", "false", "n", "no", "off", "0", "")) 9 | PROTECTED_KEYS = frozenset(("secret", "password", "passwd", "pwd", "token")) 10 | 11 | InterpolateType = Union[bool, Dict[str, str]] 12 | 13 | 14 | class InterpolateEnumType(Enum): 15 | """Interpolation Method.""" 16 | 17 | # standard matching 18 | STANDARD = 0 19 | # interpolation will look through lower levels to attempt to resolve variables. 20 | # This is particularly useful for templating 21 | DEEP = 1 22 | # similar to DEEP, but interpolating will not backtrack levels. 23 | # That is, lower levels cannot use values from higher levels. 24 | DEEP_NO_BACKTRACK = 2 25 | 26 | 27 | class AttributeDict(dict): 28 | """Dictionary subclass enabling attribute lookup/assignment of keys/values.""" 29 | 30 | def __getattr__(self, key: Any) -> Any: # noqa: D105 31 | try: 32 | return self[key] 33 | except KeyError: 34 | # to conform with __getattr__ spec 35 | raise AttributeError(key) from None 36 | 37 | def __setattr__(self, key: Any, value: Any) -> None: # noqa: D105 38 | self[key] = value 39 | 40 | 41 | def as_bool(s: Any) -> bool: 42 | """Boolean value from an object. 43 | 44 | Return the boolean value ``True`` if the case-lowered value of string 45 | input ``s`` is a `truthy string`. If ``s`` is already one of the 46 | boolean values ``True`` or ``False``, return it. 47 | """ 48 | if s is None: 49 | return False 50 | if isinstance(s, bool): 51 | return s 52 | s = str(s).strip().lower() 53 | if s not in TRUTH_TEXT and s not in FALSE_TEXT: 54 | raise ValueError("Expected a valid True or False expression.") 55 | return s in TRUTH_TEXT 56 | 57 | 58 | def clean(key: str, value: Any, mask: str = "******") -> Any: 59 | """Mask a value if needed. 60 | 61 | Params: 62 | key: key. 63 | value: value to hide. 64 | mask: string to use in case value should be hidden. 65 | 66 | Returns: 67 | clear value or mask. 68 | """ 69 | key = key.lower() 70 | # check for protected keys 71 | for pk in PROTECTED_KEYS: 72 | if pk in key: 73 | return mask 74 | # urls 75 | if isinstance(value, str) and "://" in value: 76 | from urllib.parse import urlparse 77 | 78 | url = urlparse(value) 79 | if url.password is None: 80 | return value 81 | else: 82 | return url._replace( 83 | netloc="{}:{}@{}".format(url.username, mask, url.hostname), 84 | ).geturl() 85 | return value 86 | 87 | 88 | def interpolate_standard(text: str, d: dict, found: Set[Tuple[str, ...]]) -> str: 89 | """Return the string interpolated as many times as needed. 
90 | 91 | Params: 92 | text: string possibly containing an interpolation pattern 93 | d: dictionary 94 | found: variables found so far 95 | """ 96 | if not isinstance(text, str): 97 | return text 98 | 99 | variables = tuple( 100 | sorted(x[1] for x in string.Formatter().parse(text) if x[1] is not None), 101 | ) 102 | 103 | if not variables: 104 | return text 105 | 106 | if variables in found: 107 | raise ValueError("Cycle detected while interpolating keys") 108 | else: 109 | found.add(variables) 110 | 111 | interpolated = {v: interpolate_standard(d[v], d, found) for v in variables} 112 | return text.format(**interpolated) 113 | 114 | 115 | def interpolate_deep( 116 | attr: str, 117 | text: str, 118 | d: List[dict], 119 | resolved: Dict[str, str], 120 | levels: Dict[str, int], 121 | method: InterpolateEnumType, 122 | ) -> str: 123 | """Return the string interpolated as many times as needed. 124 | 125 | Params: 126 | attr: attribute name 127 | text: string possibly containing an interpolation pattern 128 | d: dictionary 129 | resolved: variables resolved so far 130 | levels: last level to read the variable from 131 | """ 132 | if not isinstance(text, str): 133 | return text 134 | 135 | variables = {x[1] for x in string.Formatter().parse(text) if x[1] is not None} 136 | 137 | if not variables: 138 | return text 139 | 140 | length = len(d) 141 | 142 | for variable in variables.difference(resolved.keys()): 143 | # start at 1 if this is the intended attribute 144 | level = levels.setdefault(variable, 1 if variable == attr else 0) 145 | # get the first level for which the variable is defined 146 | if level == length: 147 | raise KeyError(variable) 148 | for i, dict_ in enumerate(d[level:]): 149 | if variable in dict_: 150 | level = level + i 151 | break 152 | else: 153 | raise KeyError(variable) 154 | levels[variable] = level + 1 155 | 156 | new_d = ( 157 | ([{}] * level) + d[level:] 158 | if method == InterpolateEnumType.DEEP_NO_BACKTRACK 159 | else d 160 | ) 161 | resolved[variable] = interpolate_deep( 162 | attr, 163 | d[level][variable], 164 | new_d, 165 | resolved, 166 | levels, 167 | method, 168 | ) 169 | 170 | return text.format(**resolved) 171 | 172 | 173 | def flatten(d: List[dict]) -> dict: 174 | """Flatten a list of dictionaries. 175 | 176 | Params: 177 | d: dictionary list 178 | """ 179 | result = {} 180 | [result.update(dict_) for dict_ in d[::-1]] 181 | return result 182 | 183 | 184 | def interpolate_object( 185 | attr: str, 186 | obj: Any, 187 | d: List[dict], 188 | method: InterpolateEnumType, 189 | ) -> Any: 190 | """Return the interpolated object. 
191 | 192 | Params: 193 | attr: attribute name 194 | obj: object to interpolate 195 | d: dictionary 196 | method: interpolation method 197 | """ 198 | if isinstance(obj, str): 199 | if method == InterpolateEnumType.STANDARD: 200 | return interpolate_standard(obj, flatten(d), set()) 201 | elif method in ( 202 | InterpolateEnumType.DEEP, 203 | InterpolateEnumType.DEEP_NO_BACKTRACK, 204 | ): 205 | return interpolate_deep(attr, obj, d, {}, {}, method) 206 | else: 207 | raise ValueError('Invalid interpolation method "%s"' % method) 208 | elif hasattr(obj, "__iter__"): 209 | if isinstance(obj, tuple): 210 | return tuple(interpolate_object(attr, x, d, method) for x in obj) 211 | else: 212 | return [interpolate_object(attr, x, d, method) for x in obj] 213 | else: 214 | return obj 215 | 216 | 217 | def parse_env_line(line: str) -> Tuple[str, str]: 218 | """Split an env line into variable and value.""" 219 | try: 220 | key, value = tuple(y.strip() for y in line.split("=", 1)) 221 | except ValueError: 222 | raise ValueError("Invalid line %s" % line) from None 223 | return key.strip(), value.strip() 224 | -------------------------------------------------------------------------------- /src/config/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tr11/python-configuration/f9da97f5e6a387753052d9f3850d7d41f4a75e10/src/config/py.typed -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests.""" 2 | -------------------------------------------------------------------------------- /tests/contrib/__init__.py: -------------------------------------------------------------------------------- 1 | """Contrib modules.""" 2 | -------------------------------------------------------------------------------- /tests/contrib/test_aws.py: -------------------------------------------------------------------------------- 1 | """Tests for AWS Secrets support.""" 2 | 3 | # ruff: noqa: D101,D102,D103,D106,D107,E501 4 | 5 | import json 6 | 7 | import pytest 8 | from pytest import raises 9 | 10 | try: 11 | import boto3 as aws 12 | from config.contrib.aws import AWSSecretsManagerConfiguration 13 | except ImportError: # pragma: no cover 14 | aws = None 15 | 16 | 17 | DICT = { 18 | "foo": "foo_val", 19 | "bar": "bar_val", 20 | "test.a": "val_a", 21 | "test.b": "val_b", 22 | "password": "some passwd", 23 | } 24 | 25 | DICT2 = {"a": "b", "c": "d"} 26 | 27 | 28 | class MockSession: 29 | def __init__(self, val, *args, **kwargs): # type: ignore 30 | self._value = val 31 | 32 | class Client: 33 | def __init__(self, val): # type: ignore 34 | self._value = val 35 | 36 | def get_secret_value(self, SecretId: str): # type: ignore # noqa: N803 37 | return {"SecretString": json.dumps(self._value)} 38 | 39 | def client(self, *args, **kwargs): # type: ignore 40 | return self.Client(self._value) 41 | 42 | 43 | class MockSessionFail: 44 | def __init__(self, val, *args, **kwargs): # type: ignore 45 | self._value = val 46 | 47 | class Client: 48 | def __init__(self, val): # type: ignore 49 | self._value = val 50 | 51 | def get_secret_value(self, SecretId: str): # type: ignore # noqa: N803 52 | return self._value 53 | 54 | def client(self, *args, **kwargs): # type: ignore 55 | return self.Client(self._value) 56 | 57 | 58 | @pytest.mark.skipif("aws is None") 59 | def test_load_dict(mocker): # type: ignore 60 | mocker.patch.object(aws.session, 
"Session", return_value=MockSession(DICT)) 61 | cfg = AWSSecretsManagerConfiguration(secret_name="test-secret") 62 | 63 | assert cfg.as_dict() == DICT 64 | 65 | 66 | @pytest.mark.skipif("aws is None") 67 | def test_expiration(mocker): # type: ignore 68 | mocker.patch.object(aws.session, "Session", return_value=MockSession(DICT)) 69 | 70 | # with cache 71 | cfg = AWSSecretsManagerConfiguration(secret_name="test-secret") 72 | mocker.patch.object( 73 | cfg._client, 74 | "get_secret_value", 75 | return_value={"SecretString": json.dumps(DICT)}, 76 | ) 77 | assert cfg["foo"] == "foo_val" 78 | cfg._client.get_secret_value.assert_called_once() 79 | cfg._client.get_secret_value.reset_mock() 80 | assert cfg["foo"] == "foo_val" 81 | cfg._client.get_secret_value.assert_not_called() 82 | mocker.resetall() 83 | 84 | # without cache 85 | cfg = AWSSecretsManagerConfiguration(secret_name="test-secret", cache_expiration=0) 86 | mocker.patch.object( 87 | cfg._client, 88 | "get_secret_value", 89 | return_value={"SecretString": json.dumps(DICT)}, 90 | ) 91 | assert cfg["foo"] == "foo_val" 92 | cfg._client.get_secret_value.assert_called() 93 | cfg._client.get_secret_value.reset_mock() 94 | assert cfg["foo"] == "foo_val" 95 | cfg._client.get_secret_value.assert_called() 96 | 97 | 98 | @pytest.mark.skipif("aws is None") 99 | def test_missing_key(mocker): # type: ignore 100 | mocker.patch.object(aws.session, "Session", return_value=MockSession(DICT)) 101 | cfg = AWSSecretsManagerConfiguration(secret_name="test-secret", cache_expiration=0) 102 | 103 | with raises(KeyError): 104 | assert cfg["foo-missing"] is KeyError 105 | 106 | assert cfg.get("foo-missing", "default") == "default" 107 | 108 | 109 | @pytest.mark.skipif("aws is None") 110 | def test_get_attr(mocker): # type: ignore 111 | mocker.patch.object(aws.session, "Session", return_value=MockSession(DICT)) 112 | cfg = AWSSecretsManagerConfiguration(secret_name="test-secret", cache_expiration=0) 113 | 114 | assert cfg.foo == "foo_val" 115 | 116 | with raises(AttributeError): 117 | assert cfg.foo_missing is AttributeError 118 | 119 | 120 | @pytest.mark.skipif("aws is None") 121 | def test_dict(mocker): # type: ignore 122 | mocker.patch.object(aws.session, "Session", return_value=MockSession(DICT)) 123 | cfg = AWSSecretsManagerConfiguration(secret_name="test-secret", cache_expiration=0) 124 | 125 | assert sorted(cfg.keys()) == sorted(DICT.keys()) 126 | assert sorted(cfg.values()) == sorted(DICT.values()) 127 | assert sorted(cfg.items()) == sorted(DICT.items()) 128 | 129 | assert sorted(cfg.keys(levels=1)) == ["bar", "foo", "password", "test"] 130 | assert sorted(cfg.items(levels=1)) == [ 131 | ("bar", "bar_val"), 132 | ("foo", "foo_val"), 133 | ("password", "some passwd"), 134 | ("test", {"a": "val_a", "b": "val_b"}), 135 | ] 136 | 137 | 138 | @pytest.mark.skipif("aws is None") 139 | def test_repr(mocker): # type: ignore 140 | mocker.patch.object(aws.session, "Session", return_value=MockSession(DICT)) 141 | cfg = AWSSecretsManagerConfiguration(secret_name="test-secret", cache_expiration=0) 142 | assert repr(cfg) == "" 143 | 144 | 145 | @pytest.mark.skipif("aws is None") 146 | def test_str(mocker): # type: ignore 147 | mocker.patch.object(aws.session, "Session", return_value=MockSession(DICT)) 148 | cfg = AWSSecretsManagerConfiguration(secret_name="test-secret", cache_expiration=0) 149 | 150 | # str 151 | assert ( 152 | str(cfg) 153 | == "{'bar': 'bar_val', 'foo': 'foo_val', 'password': '******', 'test.a': 'val_a', 'test.b': 'val_b'}" 154 | ) 155 | assert 
cfg["password"] == "some passwd" 156 | 157 | 158 | @pytest.mark.skipif("aws is None") 159 | def test_fail_binary(mocker): # type: ignore 160 | mocker.patch.object(aws.session, "Session", return_value=MockSessionFail(DICT)) 161 | cfg = AWSSecretsManagerConfiguration(secret_name="test-secret", cache_expiration=0) 162 | 163 | with raises(ValueError): 164 | cfg.as_dict() 165 | 166 | 167 | @pytest.mark.skipif("aws is None") 168 | def test_reload(mocker): # type: ignore 169 | mocker.patch.object(aws.session, "Session", return_value=MockSession(DICT)) 170 | cfg = AWSSecretsManagerConfiguration(secret_name="test-secret") 171 | assert cfg.as_dict() == DICT 172 | 173 | mocker.patch.object(aws.session, "Session", return_value=MockSession(DICT2)) 174 | cfg._client = MockSession(DICT2).client(service_name="secretsmanager") 175 | cfg.reload() 176 | assert cfg.as_dict() == DICT2 177 | -------------------------------------------------------------------------------- /tests/contrib/test_azure.py: -------------------------------------------------------------------------------- 1 | """Tests for Azure Keyvault support.""" 2 | 3 | # ruff: noqa: D101,D102,D103,D107,E501 4 | 5 | from collections import namedtuple 6 | 7 | import pytest 8 | from config import config_from_dict 9 | from pytest import raises 10 | 11 | try: 12 | from azure.core.exceptions import ResourceNotFoundError 13 | from config.contrib.azure import AzureKeyVaultConfiguration 14 | 15 | azure = True 16 | except ImportError: # pragma: no cover 17 | azure = None # type: ignore 18 | raise 19 | 20 | DICT = { 21 | "foo": "foo_val", 22 | "bar": "bar_val", 23 | "with-underscore": "works", 24 | "password": "some passwd", 25 | } 26 | 27 | DICT2 = {"a": "b", "c": "d"} 28 | 29 | FakeKeySecret = namedtuple("FakeKeySecret", ["key", "value"]) 30 | 31 | 32 | class Secret: 33 | def __init__(self, name: str, value: str): 34 | self.name = name 35 | self.value = value 36 | 37 | 38 | class FakeSecretClient: 39 | vault_url = "vault URL" 40 | 41 | def __init__(self, dct: dict): 42 | self._dict = dct 43 | 44 | def get_secret(self, key: str) -> FakeKeySecret: 45 | if "_" in key: 46 | raise ValueError("Azure Key Vault doesn't take underscores.") 47 | if key in self._dict: 48 | return FakeKeySecret(key, self._dict[key]) 49 | else: 50 | raise ResourceNotFoundError() 51 | 52 | def list_properties_of_secrets(self) -> list: 53 | return [Secret(name=k, value=v) for k, v in self._dict.items()] 54 | 55 | 56 | @pytest.mark.skipif("azure is None") 57 | def test_load_dict(): # type: ignore 58 | cfg = AzureKeyVaultConfiguration( 59 | "fake_id", 60 | "fake_secret", 61 | "fake-tenant", 62 | "fake_vault", 63 | ) 64 | cfg._kv_client = FakeSecretClient(DICT) 65 | assert cfg["foo"] == "foo_val" 66 | assert cfg["with_underscore"] == "works" 67 | assert cfg.get("foo", "default") == "foo_val" 68 | 69 | 70 | @pytest.mark.skipif("azure is None") 71 | def test_expiration(mocker): # type: ignore 72 | # with cache 73 | cfg = AzureKeyVaultConfiguration( 74 | "fake_id", 75 | "fake_secret", 76 | "fake-tenant", 77 | "fake_vault", 78 | ) 79 | cfg._kv_client = FakeSecretClient(DICT) 80 | 81 | spy = mocker.spy(cfg._kv_client, "get_secret") 82 | assert cfg["foo"] == "foo_val" 83 | assert cfg["foo"] == "foo_val" 84 | assert spy.call_count == 1 85 | 86 | # without cache 87 | cfg = AzureKeyVaultConfiguration( 88 | "fake_id", 89 | "fake_secret", 90 | "fake-tenant", 91 | "fake_vault", 92 | cache_expiration=0, 93 | ) 94 | cfg._kv_client = FakeSecretClient(DICT) 95 | 96 | spy = mocker.spy(cfg._kv_client, 
"get_secret") 97 | assert cfg["foo"] == "foo_val" 98 | assert cfg["foo"] == "foo_val" # this will ignore the cache 99 | assert spy.call_count == 2 100 | 101 | 102 | @pytest.mark.skipif("azure is None") 103 | def test_deletion(): # type: ignore 104 | cfg = AzureKeyVaultConfiguration( 105 | "fake_id", 106 | "fake_secret", 107 | "fake-tenant", 108 | "fake_vault", 109 | cache_expiration=0, 110 | ) 111 | d = DICT.copy() 112 | cfg._kv_client = FakeSecretClient(d) 113 | 114 | assert cfg["foo"] == "foo_val" 115 | assert "foo" in cfg._cache 116 | del d["foo"] 117 | 118 | with raises(KeyError): 119 | assert cfg["foo"] is KeyError 120 | 121 | 122 | @pytest.mark.skipif("azure is None") 123 | def test_missing_key(): # type: ignore 124 | cfg = AzureKeyVaultConfiguration( 125 | "fake_id", 126 | "fake_secret", 127 | "fake-tenant", 128 | "fake_vault", 129 | cache_expiration=0, 130 | ) 131 | d = DICT.copy() 132 | cfg._kv_client = FakeSecretClient(d) 133 | 134 | with raises(KeyError): 135 | assert cfg["foo-missing"] is KeyError 136 | 137 | assert cfg.get("foo-missing", "default") == "default" 138 | 139 | 140 | @pytest.mark.skipif("azure is None") 141 | def test_get_attr(): # type: ignore 142 | cfg = AzureKeyVaultConfiguration( 143 | "fake_id", 144 | "fake_secret", 145 | "fake-tenant", 146 | "fake_vault", 147 | cache_expiration=0, 148 | ) 149 | d = DICT.copy() 150 | cfg._kv_client = FakeSecretClient(d) 151 | 152 | assert cfg.foo == "foo_val" 153 | 154 | with raises(AttributeError): 155 | assert cfg.foo_missing is AttributeError 156 | 157 | 158 | @pytest.mark.skipif("azure is None") 159 | def test_dict(): # type: ignore 160 | cfg = AzureKeyVaultConfiguration( 161 | "fake_id", 162 | "fake_secret", 163 | "fake-tenant", 164 | "fake_vault", 165 | cache_expiration=0, 166 | ) 167 | d = DICT.copy() 168 | cfg._kv_client = FakeSecretClient(d) 169 | 170 | assert sorted(cfg.keys()) == sorted(d.keys()) 171 | assert sorted(cfg.values()) == sorted(d.values()) 172 | assert sorted(cfg.items()) == sorted(d.items()) 173 | 174 | 175 | @pytest.mark.skipif("azure is None") 176 | def test_repr(): # type: ignore 177 | cfg = AzureKeyVaultConfiguration( 178 | "fake_id", 179 | "fake_secret", 180 | "fake-tenant", 181 | "fake_vault", 182 | cache_expiration=0, 183 | ) 184 | d = DICT.copy() 185 | cfg._kv_client = FakeSecretClient(d) 186 | 187 | assert repr(cfg) == "" 188 | 189 | 190 | @pytest.mark.skipif("azure is None") 191 | def test_str(): # type: ignore 192 | cfg = AzureKeyVaultConfiguration( 193 | "fake_id", 194 | "fake_secret", 195 | "fake-tenant", 196 | "fake_vault", 197 | cache_expiration=0, 198 | ) 199 | d = DICT.copy() 200 | cfg._kv_client = FakeSecretClient(d) 201 | 202 | # str 203 | assert ( 204 | str(cfg) 205 | == "{'bar': 'bar_val', 'foo': 'foo_val', 'password': '******', 'with-underscore': 'works'}" 206 | ) 207 | assert cfg["password"] == "some passwd" 208 | 209 | 210 | @pytest.mark.skipif("azure is None") 211 | def test_reload(): # type: ignore 212 | cfg = AzureKeyVaultConfiguration( 213 | "fake_id", 214 | "fake_secret", 215 | "fake-tenant", 216 | "fake_vault", 217 | ) 218 | cfg._kv_client = FakeSecretClient(DICT) 219 | assert cfg == config_from_dict(DICT) 220 | 221 | cfg._kv_client = FakeSecretClient(DICT2) 222 | cfg.reload() 223 | assert cfg == config_from_dict(DICT2) 224 | -------------------------------------------------------------------------------- /tests/contrib/test_gcp.py: -------------------------------------------------------------------------------- 1 | """Tests for GCP support.""" 2 | 3 | # ruff: noqa: 
D101,D102,D103,D107,E501 4 | 5 | from collections import namedtuple 6 | from typing import Any, Dict 7 | 8 | import pytest 9 | from config import config_from_dict 10 | from pytest import raises 11 | 12 | try: 13 | from config.contrib.gcp import GCPSecretManagerConfiguration 14 | from google.api_core.exceptions import NotFound 15 | from google.cloud import secretmanager_v1 16 | except ImportError: # pragma: no cover 17 | secretmanager_v1 = None # type: ignore 18 | 19 | 20 | DICT = { 21 | "foo": "foo_val", 22 | "bar": "bar_val", 23 | "with_underscore": "works", 24 | "password": "some passwd", 25 | } 26 | 27 | DICT2 = {"a": "b", "c": "d"} 28 | 29 | Payload = namedtuple("Payload", ["data"]) 30 | 31 | 32 | class Secret: 33 | def __init__(self, name: str, value: str): 34 | self.name = name 35 | self.payload = Payload(value.encode()) 36 | 37 | 38 | class FakeSecretClient: 39 | def __init__(self, dct: dict): 40 | self._dict = dct 41 | 42 | def list_secrets(self, request: Dict[str, str]) -> list: 43 | return [Secret(f"prefix/{x}", "") for x in self._dict] 44 | 45 | def access_secret_version(self, request: Dict[str, str]) -> Secret: 46 | name = request["name"] 47 | try: 48 | return Secret(name, self._dict[name.split("/")[3]]) 49 | except KeyError: 50 | raise NotFound("") from None # type: ignore 51 | 52 | 53 | def fake_client(val: dict) -> Any: 54 | def call(*args: list, **kwargs: dict) -> FakeSecretClient: 55 | return FakeSecretClient(val) 56 | 57 | return call 58 | 59 | 60 | @pytest.mark.skipif("secretmanager_v1 is None") 61 | def test_load_dict(): # type: ignore 62 | secretmanager_v1.SecretManagerServiceClient = fake_client(DICT) 63 | cfg = GCPSecretManagerConfiguration("fake_id") 64 | assert cfg["foo"] == "foo_val" 65 | assert cfg["with_underscore"] == "works" 66 | assert cfg.get("foo", "default") == "foo_val" 67 | 68 | 69 | @pytest.mark.skipif("secretmanager_v1 is None") 70 | def test_expiration(mocker): # type: ignore 71 | # with cache 72 | secretmanager_v1.SecretManagerServiceClient = fake_client(DICT) 73 | cfg = GCPSecretManagerConfiguration("fake_id") 74 | 75 | spy = mocker.spy(cfg._client, "access_secret_version") 76 | assert cfg["foo"] == "foo_val" 77 | assert cfg["foo"] == "foo_val" 78 | assert spy.call_count == 1 79 | 80 | # without cache 81 | secretmanager_v1.SecretManagerServiceClient = fake_client(DICT) 82 | cfg = GCPSecretManagerConfiguration("fake_id", cache_expiration=0) 83 | 84 | spy = mocker.spy(cfg._client, "access_secret_version") 85 | assert cfg["foo"] == "foo_val" 86 | assert cfg["foo"] == "foo_val" # this will ignore the cache 87 | assert spy.call_count == 2 88 | 89 | 90 | @pytest.mark.skipif("secretmanager_v1 is None") 91 | def test_deletion(): # type: ignore 92 | d = DICT.copy() 93 | secretmanager_v1.SecretManagerServiceClient = fake_client(d) 94 | cfg = GCPSecretManagerConfiguration("fake_id", cache_expiration=0) 95 | 96 | assert cfg["foo"] == "foo_val" 97 | assert "foo" in cfg._cache 98 | del d["foo"] 99 | 100 | with raises(KeyError): 101 | assert cfg["foo"] is KeyError 102 | 103 | 104 | @pytest.mark.skipif("secretmanager_v1 is None") 105 | def test_missing_key(): # type: ignore 106 | d = DICT.copy() 107 | secretmanager_v1.SecretManagerServiceClient = fake_client(d) 108 | cfg = GCPSecretManagerConfiguration("fake_id", cache_expiration=0) 109 | 110 | with raises(KeyError): 111 | assert cfg["foo-missing"] is KeyError 112 | 113 | assert cfg.get("foo-missing", "default") == "default" 114 | 115 | 116 | @pytest.mark.skipif("secretmanager_v1 is None") 117 | def 
test_get_attr(): # type: ignore 118 | d = DICT.copy() 119 | secretmanager_v1.SecretManagerServiceClient = fake_client(d) 120 | cfg = GCPSecretManagerConfiguration("fake_id", cache_expiration=0) 121 | 122 | assert cfg.foo == "foo_val" 123 | 124 | with raises(AttributeError): 125 | assert cfg.foo_missing is AttributeError 126 | 127 | 128 | @pytest.mark.skipif("secretmanager_v1 is None") 129 | def test_dict(): # type: ignore 130 | d = DICT.copy() 131 | secretmanager_v1.SecretManagerServiceClient = fake_client(d) 132 | cfg = GCPSecretManagerConfiguration("fake_id", cache_expiration=0) 133 | 134 | assert sorted(cfg.keys()) == sorted(d.keys()) 135 | assert sorted(cfg.values()) == sorted(d.values()) 136 | assert sorted(cfg.items()) == sorted(d.items()) 137 | 138 | 139 | @pytest.mark.skipif("secretmanager_v1 is None") 140 | def test_repr(): # type: ignore 141 | d = DICT.copy() 142 | secretmanager_v1.SecretManagerServiceClient = fake_client(d) 143 | cfg = GCPSecretManagerConfiguration("fake_id", cache_expiration=0) 144 | 145 | assert repr(cfg) == "" 146 | 147 | 148 | @pytest.mark.skipif("secretmanager_v1 is None") 149 | def test_str(): # type: ignore 150 | d = DICT.copy() 151 | secretmanager_v1.SecretManagerServiceClient = fake_client(d) 152 | cfg = GCPSecretManagerConfiguration("fake_id", cache_expiration=0) 153 | 154 | # str 155 | assert ( 156 | str(cfg) 157 | == "{'bar': 'bar_val', 'foo': 'foo_val', 'password': '******', 'with_underscore': 'works'}" 158 | ) 159 | assert cfg["password"] == "some passwd" 160 | 161 | 162 | @pytest.mark.skipif("secretmanager_v1 is None") 163 | def test_reload(): # type: ignore 164 | secretmanager_v1.SecretManagerServiceClient = fake_client(DICT) 165 | cfg = GCPSecretManagerConfiguration("fake_id") 166 | assert cfg == config_from_dict(DICT) 167 | 168 | cfg._client = FakeSecretClient(DICT2) 169 | cfg.reload() 170 | assert cfg == config_from_dict(DICT2) 171 | -------------------------------------------------------------------------------- /tests/contrib/test_vault.py: -------------------------------------------------------------------------------- 1 | """Tests for Hashicorp Vault support.""" 2 | 3 | # ruff: noqa: D101,D102,D103,D107,E501 4 | 5 | from collections import namedtuple 6 | 7 | import pytest 8 | from config import config_from_dict 9 | from pytest import raises 10 | 11 | try: 12 | import hvac 13 | from config.contrib.vault import HashicorpVaultConfiguration 14 | except ImportError: # pragma: no cover 15 | hvac = None 16 | 17 | DICT = { 18 | "foo": "foo_val", 19 | "bar": "bar_val", 20 | "with_underscore": "works", 21 | "password": "some passwd", 22 | } 23 | 24 | DICT2 = {"a": "b", "c": "d"} 25 | 26 | FakeKeySecret = namedtuple("FakeKeySecret", ["key", "value"]) 27 | 28 | 29 | class FakeSecretClient: 30 | def __init__(self, engine, dct: dict): # type: ignore 31 | self._engine = engine 32 | self._dict = dct 33 | 34 | @property 35 | def kv(self): # type: ignore 36 | return self 37 | 38 | @property 39 | def v2(self): # type: ignore 40 | return self 41 | 42 | def read_secret(self, secret, mount_point): # type: ignore 43 | if mount_point == self._engine: 44 | return {"data": {"data": config_from_dict(self._dict[secret]).as_dict()}} 45 | else: 46 | raise KeyError 47 | 48 | def list(self, path): # type: ignore 49 | return {"data": {"keys": list(self._dict.keys())}} 50 | 51 | 52 | @pytest.mark.skipif("hvac is None") 53 | def test_load_dict(): # type: ignore 54 | cfg = HashicorpVaultConfiguration("engine") 55 | cfg._client = FakeSecretClient("engine", {"k": DICT}) 56 | 
57 | assert cfg["k"]["foo"] == "foo_val" 58 | assert cfg["k"]["with_underscore"] == "works" 59 | assert cfg.get("k.foo", "default") == "foo_val" 60 | 61 | 62 | @pytest.mark.skipif("hvac is None") 63 | def test_expiration(mocker): # type: ignore 64 | # with cache 65 | cfg = HashicorpVaultConfiguration("engine") 66 | cfg._client = FakeSecretClient("engine", {"k": DICT}) 67 | 68 | spy = mocker.spy(cfg._client, "read_secret") 69 | assert cfg["k"]["foo"] == "foo_val" 70 | assert cfg["k"]["foo"] == "foo_val" 71 | assert spy.call_count == 1 72 | 73 | # without cache 74 | cfg = HashicorpVaultConfiguration("engine", cache_expiration=0) 75 | cfg._client = FakeSecretClient("engine", {"k": DICT}) 76 | 77 | spy = mocker.spy(cfg._client, "read_secret") 78 | assert cfg["k"]["foo"] == "foo_val" 79 | assert cfg["k"]["foo"] == "foo_val" # this will ignore the cache 80 | assert spy.call_count == 2 81 | 82 | 83 | @pytest.mark.skipif("hvac is None") 84 | def test_deletion(): # type: ignore 85 | cfg = HashicorpVaultConfiguration("engine", cache_expiration=0) 86 | d = DICT.copy() 87 | dd = {"k": d, "a": d} 88 | cfg._client = FakeSecretClient("engine", dd) 89 | 90 | assert cfg.k["foo"] == "foo_val" 91 | assert "k" in cfg._cache 92 | del dd["k"] 93 | 94 | with raises(KeyError): 95 | assert cfg["k"] is KeyError 96 | 97 | 98 | @pytest.mark.skipif("hvac is None") 99 | def test_missing_key(): # type: ignore 100 | cfg = HashicorpVaultConfiguration("engine") 101 | d = DICT.copy() 102 | cfg._client = FakeSecretClient("engine", {"k": d}) 103 | 104 | with raises(KeyError): 105 | assert cfg["not-k"] is KeyError 106 | 107 | with raises(KeyError): 108 | assert cfg["k"]["foo-missing"] is KeyError 109 | 110 | assert cfg.get("k.foo-missing", "default") == "default" 111 | 112 | 113 | @pytest.mark.skipif("hvac is None") 114 | def test_get_attr(): # type: ignore 115 | cfg = HashicorpVaultConfiguration("engine") 116 | d = DICT.copy() 117 | cfg._client = FakeSecretClient("engine", {"k": d}) 118 | 119 | assert cfg.k.foo == "foo_val" 120 | 121 | with raises(AttributeError): 122 | assert cfg.notk is AttributeError 123 | 124 | with raises(AttributeError): 125 | assert cfg.k.foo_missing is AttributeError 126 | 127 | 128 | @pytest.mark.skipif("hvac is None") 129 | def test_dict(): # type: ignore 130 | cfg = HashicorpVaultConfiguration("engine") 131 | d = DICT.copy() 132 | cfg._client = FakeSecretClient("engine", {"k": d, "a": d}) 133 | 134 | assert sorted(cfg.keys()) == sorted({"k": d, "a": d}.keys()) 135 | assert list(cfg.values()) == [d, d] 136 | assert sorted(cfg.items()) == sorted({"k": d, "a": d}.items()) 137 | 138 | 139 | @pytest.mark.skipif("hvac is None") 140 | def test_repr(): # type: ignore 141 | cfg = HashicorpVaultConfiguration("engine") 142 | d = DICT.copy() 143 | cfg._client = FakeSecretClient("engine", {"k": d}) 144 | 145 | assert repr(cfg) == "" 146 | 147 | 148 | @pytest.mark.skipif("hvac is None") 149 | def test_str(): # type: ignore 150 | cfg = HashicorpVaultConfiguration("engine") 151 | d = DICT.copy() 152 | cfg._client = FakeSecretClient("engine", {"k": d}) 153 | 154 | # str 155 | assert ( 156 | str(cfg) 157 | == "{'k.bar': 'bar_val', 'k.foo': 'foo_val', 'k.password': '******', 'k.with_underscore': 'works'}" 158 | ) 159 | assert cfg["k.password"] == "some passwd" 160 | 161 | 162 | @pytest.mark.skipif("hvac is None") 163 | def test_reload(): # type: ignore 164 | cfg = HashicorpVaultConfiguration("engine") 165 | d = DICT.copy() 166 | cfg._client = FakeSecretClient("engine", {"k": d}) 167 | assert cfg == 
config_from_dict({"k": DICT}) 168 | 169 | cfg._client = FakeSecretClient("engine", {"k": DICT2}) 170 | cfg.reload() 171 | assert cfg == config_from_dict({"k": DICT2}) 172 | -------------------------------------------------------------------------------- /tests/python_config.py: -------------------------------------------------------------------------------- 1 | """Example file to import as a config.""" 2 | 3 | import sys 4 | 5 | CONFIG_SYS_VERSION = sys.hexversion 6 | CONFIG_A1_B1_C1 = 1 7 | CONFIG_A1_B1_C2 = 2 8 | CONFIG_A1_B1_C3 = 3 9 | CONFIG_A1_B2_C1 = "a" 10 | CONFIG_A1_B2_C2 = True 11 | CONFIG_A1_B2_C3 = 1.1 12 | CONFIG_A2_B1_C1 = "f" 13 | CONFIG_A2_B1_C2 = False 14 | CONFIG_A2_B1_C3 = None 15 | CONFIG_A2_B2_C1 = 10 16 | CONFIG_A2_B2_C2 = "YWJjZGVmZ2g=" 17 | CONFIG_A2_B2_C3 = "abcdefgh" 18 | -------------------------------------------------------------------------------- /tests/python_config_2.py: -------------------------------------------------------------------------------- 1 | """Example file to import as a config.""" 2 | 3 | import sys 4 | 5 | CONFIG__SYS__VERSION = sys.hexversion 6 | CONFIG__A1__B1__C1 = 1 7 | CONFIG__A1__B1__C2 = 2 8 | CONFIG__A1__B1__C3 = 3 9 | CONFIG__A1__B2__C1 = "a" 10 | CONFIG__A1__B2__C2 = True 11 | CONFIG__A1__B2__C3 = 1.1 12 | CONFIG__A2__B1__C1 = "f" 13 | CONFIG__A2__B1__C2 = False 14 | CONFIG__A2__B1__C3 = None 15 | CONFIG__A2__B2__C1 = 10 16 | CONFIG__A2__B2__C2 = "YWJjZGVmZ2g=" 17 | CONFIG__A2__B2__C3 = "abcdefgh" 18 | -------------------------------------------------------------------------------- /tests/test_basic.py: -------------------------------------------------------------------------------- 1 | """Basic tests.""" 2 | 3 | # ruff: noqa: D103,E501 4 | 5 | import json 6 | 7 | import pytest 8 | from config import config_from_dict 9 | 10 | DICT = { 11 | "a1.B1.c1": 1, 12 | "a1.b1.C2": 2, 13 | "A1.b1.c3": 3, 14 | "a1.b2.c1": "a", 15 | "a1.b2.c2": True, 16 | "a1.b2.c3": 1.1, 17 | "a2.b1.c1": "f", 18 | "a2.b1.c2": False, 19 | "a2.b1.c3": None, 20 | "a2.b2.c1": 10, 21 | "a2.b2.c2": "YWJjZGVmZ2g=", 22 | "a2.b2.c3": "abcdefgh", 23 | } 24 | 25 | RESERVED = {"keys": [1, 2, 3], "values": ["a", "b", "c"], "items": [1.1, 2.1, 3.1]} 26 | 27 | NESTED = { 28 | "a1": {"b1": {"c1": 1, "C2": 2, "c3": 3}, "b2": {"c1": "a", "c2": True, "c3": 1.1}}, 29 | } 30 | 31 | PROTECTED = { 32 | "important_password": "abc", 33 | "very_secret": "SeCReT", 34 | "clear_text": "abc", 35 | "url": "protocol://user:pass@hostname:port/path", 36 | "url2": "protocol://user@hostname:port/path", 37 | } 38 | 39 | 40 | def test_version_is_defined(): # type: ignore 41 | from config import __version__, __version_tuple__ 42 | 43 | assert isinstance(__version__, str) 44 | assert isinstance(__version_tuple__, tuple) 45 | 46 | 47 | def test_load_dict(): # type: ignore 48 | cfg = config_from_dict(DICT, lowercase_keys=True) 49 | assert cfg["a1.b1.c1"] == 1 50 | assert cfg["a1.b1"].as_dict() == {"c1": 1, "c2": 2, "c3": 3} 51 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": True, "c3": 1.1} 52 | 53 | 54 | def test_load_nested(): # type: ignore 55 | cfg = config_from_dict(NESTED, lowercase_keys=True) 56 | assert cfg["a1.b1.c1"] == 1 57 | assert cfg["a1.b1"].as_dict() == {"c1": 1, "c2": 2, "c3": 3} 58 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": True, "c3": 1.1} 59 | 60 | 61 | def test_gets(): # type: ignore 62 | cfg = config_from_dict(DICT) 63 | assert cfg["a1.b2"].get("r") is None 64 | assert cfg["a1.b2"].get("c3") == 1.1 65 | assert cfg["a1"].get_dict("b2") == {"c1": "a", "c2": True, 
"c3": 1.1} 66 | assert cfg.get_dict("a1") == { 67 | "B1.c1": 1, 68 | "b1.C2": 2, 69 | "b2.c1": "a", 70 | "b2.c2": True, 71 | "b2.c3": 1.1, 72 | } 73 | 74 | 75 | def test_attr_gets(): # type: ignore 76 | cfg = config_from_dict(DICT) 77 | assert cfg.a1.b2.get("r") is None 78 | assert cfg.a1.b2.get("c3") == 1.1 79 | assert cfg.a1.get_dict("b2") == {"c1": "a", "c2": True, "c3": 1.1} 80 | 81 | 82 | def test_base64(): # type: ignore 83 | cfg = config_from_dict(DICT) 84 | assert cfg.base64encode("a2.b2.c3") == b"YWJjZGVmZ2g=" 85 | assert cfg.base64decode("a2.b2.c2") == b"abcdefgh" 86 | 87 | 88 | def test_reserved(): # type: ignore 89 | cfg = config_from_dict(RESERVED) 90 | assert cfg["keys"] == [1, 2, 3] 91 | assert cfg["values"] == ["a", "b", "c"] 92 | assert cfg["items"] == [1.1, 2.1, 3.1] 93 | 94 | assert cfg.as_dict() == RESERVED 95 | with pytest.raises(TypeError): # fails as the config has an entry for keys 96 | dict(cfg) 97 | 98 | 99 | def test_fails(): # type: ignore 100 | cfg = config_from_dict(DICT) 101 | with pytest.raises(KeyError, match="a1.b2.c3.d4"): 102 | assert cfg["a1.b2.c3.d4"] is Exception 103 | 104 | with pytest.raises(AttributeError, match="c4"): 105 | assert cfg.a1.b2.c4 is Exception 106 | 107 | with pytest.raises(ValueError, match="Expected a valid True or False expression."): 108 | assert cfg["a1.b2"].get_bool("c3") is Exception 109 | 110 | 111 | def test_type_conversions(): # type: ignore 112 | cfg = config_from_dict(DICT, lowercase_keys=True) 113 | assert cfg["a1.b2"].get_float("c3") == 1.1 114 | assert cfg["a1.b2"].get_int("c3") == 1 115 | assert cfg["a1.b2"].get_str("c3") == "1.1" 116 | assert cfg["a1.b2"].get_str("c3", "{:0.3f}") == "1.100" 117 | assert cfg.a1.b2.c3 == 1.1 118 | assert dict(cfg.a1.b2) == {"c1": "a", "c2": True, "c3": 1.1} 119 | 120 | assert cfg["a1.b2"].get_bool("c2") is True # True 121 | assert cfg["a1.b1"].get_bool("c1") is True # 1 122 | assert cfg["a2.b1"].get_bool("c3") is False # None 123 | assert cfg["a2.b1"].get_bool("c2") is False # False 124 | assert cfg["a2.b1"].get_bool("c1") is False # 'f' 125 | 126 | 127 | def test_repr_and_str(): # type: ignore 128 | cfg = config_from_dict(DICT, lowercase_keys=True) 129 | 130 | # repr 131 | assert hex(id(cfg)) in repr(cfg) 132 | 133 | # str 134 | assert ( 135 | str(cfg) 136 | == "{'a1.b1.c1': 1, 'a1.b1.c2': 2, 'a1.b1.c3': 3, 'a1.b2.c1': 'a', 'a1.b2.c2': True," 137 | " 'a1.b2.c3': 1.1, 'a2.b1.c1': 'f', 'a2.b1.c2': False, 'a2.b1.c3': None," 138 | " 'a2.b2.c1': 10, 'a2.b2.c2': 'YWJjZGVmZ2g=', 'a2.b2.c3': 'abcdefgh'}" 139 | ) 140 | 141 | # protected 142 | cfg = config_from_dict(PROTECTED, lowercase_keys=True) 143 | assert ( 144 | str(cfg) == "{'clear_text': 'abc', 'important_password': '******', " 145 | "'url': 'protocol://user:******@hostname/path', 'url2': 'protocol://user@hostname:port/path', " 146 | "'very_secret': '******'}" 147 | ) 148 | 149 | 150 | def test_dict_methods_keys(): # type: ignore 151 | cfg = config_from_dict(DICT, lowercase_keys=True) 152 | 153 | assert set(cfg.keys()) == { 154 | "a1", 155 | "a2", 156 | } 157 | with cfg.dotted_iter() as cfg_: 158 | assert set(cfg_.keys()) == { 159 | "a1.b2.c3", 160 | "a2.b1.c3", 161 | "a2.b2.c2", 162 | "a2.b2.c3", 163 | "a1.b2.c2", 164 | "a2.b1.c2", 165 | "a1.b2.c1", 166 | "a1.b1.c3", 167 | "a2.b1.c1", 168 | "a1.b1.c1", 169 | "a1.b1.c2", 170 | "a2.b2.c1", 171 | } 172 | assert set(cfg.keys(levels=1)) == {"a1", "a2"} 173 | assert set(cfg.keys(levels=2)) == {"a1.b2", "a2.b2", "a2.b1", "a1.b1"} 174 | assert set(cfg.keys(levels=3)) == { 175 | "a1.b2.c3", 176 
| "a2.b1.c3", 177 | "a2.b2.c2", 178 | "a2.b2.c3", 179 | "a1.b2.c2", 180 | "a2.b1.c2", 181 | "a1.b2.c1", 182 | "a1.b1.c3", 183 | "a2.b1.c1", 184 | "a1.b1.c1", 185 | "a1.b1.c2", 186 | "a2.b2.c1", 187 | } 188 | assert set(cfg.keys(levels=100)) == { 189 | "a1.b2.c3", 190 | "a2.b1.c3", 191 | "a2.b2.c2", 192 | "a2.b2.c3", 193 | "a1.b2.c2", 194 | "a2.b1.c2", 195 | "a1.b2.c1", 196 | "a1.b1.c3", 197 | "a2.b1.c1", 198 | "a1.b1.c1", 199 | "a1.b1.c2", 200 | "a2.b2.c1", 201 | } 202 | 203 | with pytest.raises(AssertionError): 204 | set(cfg.keys(levels=0)) 205 | 206 | 207 | def test_dict_methods_items(): # type: ignore 208 | cfg = config_from_dict(DICT, lowercase_keys=True) 209 | 210 | assert dict(cfg.items()) == { 211 | "a1": { 212 | "b1.c1": 1, 213 | "b1.c2": 2, 214 | "b1.c3": 3, 215 | "b2.c1": "a", 216 | "b2.c2": True, 217 | "b2.c3": 1.1, 218 | }, 219 | "a2": { 220 | "b1.c1": "f", 221 | "b1.c2": False, 222 | "b1.c3": None, 223 | "b2.c1": 10, 224 | "b2.c2": "YWJjZGVmZ2g=", 225 | "b2.c3": "abcdefgh", 226 | }, 227 | } 228 | with cfg.dotted_iter() as cfg_: 229 | assert set(cfg_.items()) == { 230 | ("a1.b1.c3", 3), 231 | ("a2.b1.c3", None), 232 | ("a1.b2.c1", "a"), 233 | ("a2.b2.c2", "YWJjZGVmZ2g="), 234 | ("a2.b1.c1", "f"), 235 | ("a1.b1.c1", 1), 236 | ("a2.b2.c3", "abcdefgh"), 237 | ("a1.b2.c2", True), 238 | ("a1.b2.c3", 1.1), 239 | ("a2.b2.c1", 10), 240 | ("a1.b1.c2", 2), 241 | ("a2.b1.c2", False), 242 | } 243 | assert dict(cfg.items(levels=1)) == { 244 | "a1": { 245 | "b1.c1": 1, 246 | "b1.c2": 2, 247 | "b1.c3": 3, 248 | "b2.c1": "a", 249 | "b2.c2": True, 250 | "b2.c3": 1.1, 251 | }, 252 | "a2": { 253 | "b1.c1": "f", 254 | "b1.c2": False, 255 | "b1.c3": None, 256 | "b2.c1": 10, 257 | "b2.c2": "YWJjZGVmZ2g=", 258 | "b2.c3": "abcdefgh", 259 | }, 260 | } 261 | assert dict(cfg.items(levels=2)) == { 262 | "a1.b1": {"c1": 1, "c2": 2, "c3": 3}, 263 | "a1.b2": {"c1": "a", "c2": True, "c3": 1.1}, 264 | "a2.b1": {"c1": "f", "c2": False, "c3": None}, 265 | "a2.b2": {"c1": 10, "c2": "YWJjZGVmZ2g=", "c3": "abcdefgh"}, 266 | } 267 | assert set(cfg.items(levels=3)) == { 268 | ("a1.b1.c3", 3), 269 | ("a2.b1.c3", None), 270 | ("a1.b2.c1", "a"), 271 | ("a2.b2.c2", "YWJjZGVmZ2g="), 272 | ("a2.b1.c1", "f"), 273 | ("a1.b1.c1", 1), 274 | ("a2.b2.c3", "abcdefgh"), 275 | ("a1.b2.c2", True), 276 | ("a1.b2.c3", 1.1), 277 | ("a2.b2.c1", 10), 278 | ("a1.b1.c2", 2), 279 | ("a2.b1.c2", False), 280 | } 281 | assert set(cfg.items(levels=100)) == { 282 | ("a1.b1.c3", 3), 283 | ("a2.b1.c3", None), 284 | ("a1.b2.c1", "a"), 285 | ("a2.b2.c2", "YWJjZGVmZ2g="), 286 | ("a2.b1.c1", "f"), 287 | ("a1.b1.c1", 1), 288 | ("a2.b2.c3", "abcdefgh"), 289 | ("a1.b2.c2", True), 290 | ("a1.b2.c3", 1.1), 291 | ("a2.b2.c1", 10), 292 | ("a1.b1.c2", 2), 293 | ("a2.b1.c2", False), 294 | } 295 | with pytest.raises(AssertionError): 296 | set(cfg.items(levels=0)) 297 | 298 | 299 | def test_dict_methods_values(): # type: ignore 300 | cfg = config_from_dict(DICT, lowercase_keys=True) 301 | 302 | assert sorted(json.dumps(x, sort_keys=True) for x in cfg.values()) == sorted( 303 | [ 304 | json.dumps( 305 | { 306 | "b1.c1": "f", 307 | "b1.c2": False, 308 | "b1.c3": None, 309 | "b2.c1": 10, 310 | "b2.c2": "YWJjZGVmZ2g=", 311 | "b2.c3": "abcdefgh", 312 | }, 313 | sort_keys=True, 314 | ), 315 | json.dumps( 316 | { 317 | "b1.c1": 1, 318 | "b1.c2": 2, 319 | "b1.c3": 3, 320 | "b2.c1": "a", 321 | "b2.c2": True, 322 | "b2.c3": 1.1, 323 | }, 324 | sort_keys=True, 325 | ), 326 | ], 327 | ) 328 | 329 | with cfg.dotted_iter() as cfg_: 330 | assert set(cfg_.values()) == { 331 | 
False, 332 | True, 333 | 2, 334 | 3, 335 | "f", 336 | 1.1, 337 | 10, 338 | None, 339 | "abcdefgh", 340 | "a", 341 | "YWJjZGVmZ2g=", 342 | } 343 | 344 | assert sorted( 345 | json.dumps(x, sort_keys=True) for x in cfg.values(levels=1) 346 | ) == sorted( 347 | [ 348 | json.dumps( 349 | { 350 | "b1.c1": "f", 351 | "b1.c2": False, 352 | "b1.c3": None, 353 | "b2.c1": 10, 354 | "b2.c2": "YWJjZGVmZ2g=", 355 | "b2.c3": "abcdefgh", 356 | }, 357 | sort_keys=True, 358 | ), 359 | json.dumps( 360 | { 361 | "b1.c1": 1, 362 | "b1.c2": 2, 363 | "b1.c3": 3, 364 | "b2.c1": "a", 365 | "b2.c2": True, 366 | "b2.c3": 1.1, 367 | }, 368 | sort_keys=True, 369 | ), 370 | ], 371 | ) 372 | 373 | 374 | def test_dict_methods_dict(): # type: ignore 375 | cfg = config_from_dict(DICT, lowercase_keys=True) 376 | 377 | a1 = { 378 | "b1.c1": 1, 379 | "b1.c2": 2, 380 | "b1.c3": 3, 381 | "b2.c1": "a", 382 | "b2.c2": True, 383 | "b2.c3": 1.1, 384 | } 385 | a2 = { 386 | "b1.c1": "f", 387 | "b1.c2": False, 388 | "b1.c3": None, 389 | "b2.c1": 10, 390 | "b2.c2": "YWJjZGVmZ2g=", 391 | "b2.c3": "abcdefgh", 392 | } 393 | 394 | # as_dict and get_dict always returns dotted keys 395 | assert cfg.as_dict() == {k.lower(): v for k, v in DICT.items()} 396 | assert cfg.get_dict("a1") == a1 397 | assert cfg.get_dict("a1.b2") == {"c1": "a", "c2": True, "c3": 1.1} 398 | # dict() uses the iterator methods and will return a nested dict by default 399 | assert dict(cfg) == {"a1": a1, "a2": a2} 400 | 401 | with cfg.dotted_iter(): 402 | # as_dict and get_dict always returns dotted keys 403 | assert cfg.as_dict() == {k.lower(): v for k, v in DICT.items()} 404 | assert cfg.get_dict("a1") == a1 405 | assert cfg.get_dict("a1.b2") == {"c1": "a", "c2": True, "c3": 1.1} 406 | # in this case the iterators will return all the (dotted) keys, so dict() is the same as .as_dict() 407 | assert dict(cfg) == {k.lower(): v for k, v in DICT.items()} 408 | 409 | 410 | def test_eq(): # type: ignore 411 | cfg = config_from_dict(DICT, lowercase_keys=True) 412 | 413 | nested = { 414 | "a1": { 415 | "b1": {"c1": 1, "c2": 2, "c3": 3}, 416 | "b2": {"c1": "a", "c2": True, "c3": 1.1}, 417 | }, 418 | "a2": { 419 | "b1": {"c1": "f", "c2": False, "c3": None}, 420 | "b2": {"c1": 10, "c2": "YWJjZGVmZ2g=", "c3": "abcdefgh"}, 421 | }, 422 | } 423 | 424 | # equality with dictionaries -- the second one fails as it's a dict comparison 425 | assert cfg.as_dict() == {k.lower(): v for k, v in DICT.items()} 426 | assert cfg.as_dict() != nested 427 | # equality with dictionaries -- in this case the second one passes 428 | assert cfg == {k.lower(): v for k, v in DICT.items()} 429 | assert cfg == nested 430 | -------------------------------------------------------------------------------- /tests/test_configuration_set.py: -------------------------------------------------------------------------------- 1 | """Tests for Configuration Sets.""" 2 | 3 | # ruff: noqa: D103,E501 4 | 5 | import json 6 | import os 7 | import sys 8 | 9 | from config import ( 10 | ConfigurationSet, 11 | config, 12 | config_from_dict, 13 | config_from_dotenv, 14 | config_from_env, 15 | config_from_ini, 16 | config_from_json, 17 | config_from_path, 18 | config_from_python, 19 | create_path_from_config, 20 | ) 21 | 22 | try: 23 | import yaml 24 | except ImportError: 25 | yaml = None 26 | if sys.version_info < (3, 11): # pragma: no cover 27 | try: 28 | import tomli as toml 29 | 30 | except ImportError: 31 | toml = None # type: ignore 32 | else: # pragma: no cover 33 | import tomllib as toml 34 | import pytest 35 | 36 | DICT1 = 
{ 37 | "a1.B1.c1": 1, 38 | "a1.b1.C2": 2, 39 | "A1.b1.c3": 3, 40 | "a1.b2.c1": "a", 41 | "a1.b2.c2": True, 42 | "a1.b2.c3": 1.1, 43 | } 44 | DICT2_1 = {"a2.b1.c1": "f", "a2.b1.c2": False, "a2.B1.c3": None} 45 | DICT2_2 = {"a2.b2.c1": 10, "a2.b2.c2": "YWJjZGVmZ2g=", "a2.b2.C3": "abcdefgh"} 46 | DICT3_1 = { 47 | "a2.b2.c1": 10, 48 | "a2.b2.c2": "YWJjZGVmZ2g=", 49 | "a2.b2.C3": "abcdefgh", 50 | "z1": 100, 51 | } 52 | DICT3_2 = {"a2": 10, "z1.w2": 123, "z1.w3": "abc"} 53 | DICT3_3 = {"a2.g2": 10, "a2.w2": 123, "a2.w3": "abc"} 54 | DICT3 = { 55 | "a3.b1.c1": "af", 56 | "a3.b1.c2": True, 57 | "a3.b1.c3": None, 58 | "a3.b2.c1": 104, 59 | "a3.b2.c2": "YWJjZGVmZ2g=", 60 | "a3.b2.c3": "asdfdsbcdefgh", 61 | } 62 | JSON = json.dumps(DICT3) 63 | 64 | DICT4 = { 65 | "a3.b1.c1": "afsdf", 66 | "a3.b1.c2": False, 67 | "a3.b1.c3": None, 68 | "a3.b2.c1": 107, 69 | "a3.b2.c2": "YWsdfsJjZGVmZ2g=", 70 | "a3.b2.c3": "asdfdssdfbcdefgh", 71 | } 72 | JSON2 = json.dumps(DICT4) 73 | 74 | 75 | if yaml: 76 | YAML = """ 77 | z1: 78 | w1: 1 79 | w2: null 80 | w3: abc 81 | z2: 82 | w1: 1.1 83 | w2: 84 | - a 85 | - b 86 | - c 87 | w3: 88 | p1: 1 89 | p2: 5.4 90 | """ 91 | 92 | DICT_YAML = { 93 | "z1.w1": 1, 94 | "z1.w2": None, 95 | "z1.w3": "abc", 96 | "z2.w1": 1.1, 97 | "z2.w2": ["a", "b", "c"], 98 | "z2.w3": {"p1": 1, "p2": 5.4}, 99 | } 100 | 101 | if toml: 102 | TOML = """ 103 | [owner] 104 | name = "ABC" 105 | [database] 106 | server = "192.168.1.1" 107 | ports = [ 8001, 8001, 8002,] 108 | connection_max = 5000 109 | enabled = true 110 | [clients] 111 | data = [ [ "gamma", "delta",], [ 1, 2,],] 112 | hosts = [ "alpha", "omega",] 113 | [servers.alpha] 114 | ip = "10.0.0.1" 115 | dc = "eqdc10" 116 | [servers.beta] 117 | ip = "10.0.0.2" 118 | dc = "eqdc10" 119 | """ 120 | 121 | DICT_TOML = { 122 | "owner": {"name": "ABC"}, 123 | "database": { 124 | "server": "192.168.1.1", 125 | "ports": [8001, 8001, 8002], 126 | "connection_max": 5000, 127 | "enabled": True, 128 | }, 129 | "clients": {"data": [["gamma", "delta"], [1, 2]], "hosts": ["alpha", "omega"]}, 130 | "servers": { 131 | "alpha": {"ip": "10.0.0.1", "dc": "eqdc10"}, 132 | "beta": {"ip": "10.0.0.2", "dc": "eqdc10"}, 133 | }, 134 | } 135 | 136 | INI = """ 137 | [section1] 138 | key1 = True 139 | 140 | [section2] 141 | key1 = abc 142 | key2 = def 143 | key3 = 1.1 144 | 145 | [section3] 146 | key1 = 1 147 | key2 = 0 148 | """ 149 | 150 | DICT_INI = { 151 | "section1.key1": "True", 152 | "section2.key1": "abc", 153 | "section2.key2": "def", 154 | "section2.key3": "1.1", 155 | "section3.key1": "1", 156 | "section3.key2": "0", 157 | } 158 | 159 | DOTENV = """ 160 | dotenv1 = abc 161 | dotenv2 = 1.2 162 | dotenv3 = xyz 163 | """ 164 | 165 | DOTENV_PREFIX = """ 166 | CONFIG__dotenv1 = abc 167 | CONFIG__dotenv2 = 1.2 168 | CONFIG__dotenv3 = xyz 169 | """ 170 | 171 | 172 | DICT_DOTENV = { 173 | "dotenv1": "abc", 174 | "dotenv2": "1.2", 175 | "dotenv3": "xyz", 176 | } 177 | 178 | PATH_DICT = { 179 | "sdf.dsfsfd": 1, 180 | "sdjf.wquwe": "sdfsd", 181 | "sdjf.wquwe43": None, 182 | "sdjf.wquwse43": True, 183 | } 184 | 185 | PREFIX = "CONFIG" 186 | 187 | os.environ.update( 188 | (PREFIX + "__" + k.replace(".", "__").upper(), str(v)) for k, v in DICT1.items() 189 | ) 190 | 191 | 192 | def test_load_env(): # type: ignore 193 | cfg = ConfigurationSet( 194 | config_from_dict(DICT2_1, lowercase_keys=True), 195 | config_from_dict(DICT2_2, lowercase_keys=True), 196 | config_from_env(prefix=PREFIX, lowercase_keys=True), 197 | ) 198 | # from env 199 | assert cfg["a1.b1.c1"] == "1" 200 | 
assert cfg["a1.b1"].get_int("c1") == 1 201 | assert cfg["a1.b1"].as_dict() == {"c1": "1", "c2": "2", "c3": "3"} 202 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": "True", "c3": "1.1"} 203 | # from dict 204 | assert cfg["a2.b1.c1"] == "f" 205 | assert cfg["a2.b2"].as_dict() == {"c1": 10, "c2": "YWJjZGVmZ2g=", "c3": "abcdefgh"} 206 | 207 | 208 | def test_fails(): # type: ignore 209 | cfg = ConfigurationSet( 210 | config_from_dict(DICT2_1, lowercase_keys=True), 211 | config_from_dict(DICT2_2, lowercase_keys=True), 212 | config_from_env(prefix=PREFIX, lowercase_keys=True), 213 | ) 214 | 215 | with pytest.raises(KeyError, match="a1.b2.c3.d4"): 216 | assert cfg["a1.b2.c3.d4"] is Exception 217 | 218 | with pytest.raises(AttributeError, match="c4"): 219 | assert cfg.a1.b2.c4 is Exception 220 | 221 | with pytest.raises(ValueError, match="Expected a valid True or False expression."): 222 | assert cfg["a1.b2"].get_bool("c3") is Exception 223 | 224 | 225 | def test_get(): # type: ignore 226 | cfg = ConfigurationSet( 227 | config_from_dict(DICT2_1, lowercase_keys=True), 228 | config_from_dict(DICT2_2, lowercase_keys=True), 229 | config_from_env(prefix=PREFIX, lowercase_keys=True), 230 | ) 231 | 232 | assert cfg.get("a2.b2") == config_from_dict( 233 | {"c1": 10, "c2": "YWJjZGVmZ2g=", "c3": "abcdefgh"}, 234 | ) 235 | assert cfg.get("a2.b5", "1") == "1" 236 | 237 | 238 | def test_get_dict(): # type: ignore 239 | cfg = ConfigurationSet( 240 | config_from_dict(DICT2_1, lowercase_keys=True), 241 | config_from_dict(DICT2_2, lowercase_keys=True), 242 | config_from_env(prefix=PREFIX, lowercase_keys=True), 243 | ) 244 | 245 | a2 = { 246 | "b2.c1": 10, 247 | "b1.c1": "f", 248 | "b1.c2": False, 249 | "b1.c3": None, 250 | "b2.c2": "YWJjZGVmZ2g=", 251 | "b2.c3": "abcdefgh", 252 | } 253 | a2nested = { 254 | "b1": {"c1": "f", "c2": False, "c3": None}, 255 | "b2": {"c1": 10, "c2": "YWJjZGVmZ2g=", "c3": "abcdefgh"}, 256 | } 257 | 258 | assert cfg.get_dict("a2") == a2 259 | assert cfg.a2.as_dict() == a2 260 | assert dict(cfg.a2) == a2nested 261 | with cfg.dotted_iter(): 262 | assert cfg.get_dict("a2") == a2 263 | assert cfg.a2.as_dict() == a2 264 | # note that this still returns he nested dict since the dotted iteration 265 | # impacts only the parent cfg, not cfg.a 266 | assert dict(cfg.a2) == a2nested 267 | # to use dotted iteration for children, we need to explicitly set it 268 | with cfg.a2.dotted_iter() as cfg_a2: 269 | assert dict(cfg_a2) == a2 270 | 271 | with pytest.raises(KeyError): 272 | assert cfg.get_dict("a3") is Exception 273 | 274 | assert dict(cfg.a2) == dict(cfg.a2.items()) 275 | 276 | 277 | def test_get_dict_different_types(): # type: ignore 278 | cfg = ConfigurationSet( 279 | config_from_dict(DICT3_1, lowercase_keys=True), 280 | config_from_dict(DICT3_2, lowercase_keys=True), # a2 is ignored here 281 | config_from_dict(DICT3_3, lowercase_keys=True), 282 | ) 283 | 284 | a2 = { 285 | "b2.c1": 10, 286 | "b2.c2": "YWJjZGVmZ2g=", 287 | "b2.c3": "abcdefgh", 288 | "g2": 10, 289 | "w2": 123, 290 | "w3": "abc", 291 | } 292 | a2nested = { 293 | "b2": {"c1": 10, "c2": "YWJjZGVmZ2g=", "c3": "abcdefgh"}, 294 | "g2": 10, 295 | "w2": 123, 296 | "w3": "abc", 297 | } 298 | 299 | assert cfg.get_dict("a2") == a2 300 | assert cfg.a2.as_dict() == a2 301 | assert dict(cfg.a2) == a2nested 302 | 303 | with cfg.dotted_iter(): 304 | assert cfg.get_dict("a2") == a2 305 | assert cfg.a2.as_dict() == a2 306 | # note that this still returns he nested dict since the dotted iteration 307 | # impacts only the parent cfg, not 
cfg.a 308 | assert dict(cfg.a2) == a2nested 309 | # to use dotted iteration for children, we need to explicitly set it 310 | with cfg.a2.dotted_iter() as cfg_a2: 311 | assert dict(cfg_a2) == a2 312 | 313 | with pytest.raises(TypeError): # the first configuration overrides the type 314 | assert cfg.get_dict("z1") is Exception 315 | assert cfg.z1 == 100 316 | 317 | 318 | def test_repr_and_str(): # type: ignore 319 | import sys 320 | 321 | path = os.path.join(os.path.dirname(__file__), "python_config.py") 322 | cfg = ConfigurationSet( 323 | config_from_dict(DICT2_1, lowercase_keys=True), 324 | config_from_dict(DICT2_2, lowercase_keys=True), 325 | config_from_env(prefix=PREFIX, lowercase_keys=True), 326 | config_from_python(path, prefix="CONFIG", lowercase_keys=True), 327 | ) 328 | 329 | joined_dicts = {k: str(v) for k, v in DICT1.items()} 330 | joined_dicts.update(DICT2_1) 331 | joined_dicts.update(DICT2_2) 332 | joined_dicts["sys.version"] = sys.hexversion 333 | assert hex(id(cfg)) in repr(cfg) 334 | 335 | assert ( 336 | str(cfg) 337 | == "{'a1.b1.c1': '1', 'a1.b1.c2': '2', 'a1.b1.c3': '3', 'a1.b2.c1': 'a', 'a1.b2.c2': 'True', " 338 | "'a1.b2.c3': '1.1', 'a2.b1.c1': 'f', 'a2.b1.c2': False, 'a2.b1.c3': None, 'a2.b2.c1': 10, " 339 | "'a2.b2.c2': 'YWJjZGVmZ2g=', 'a2.b2.c3': 'abcdefgh', 'sys.version': " 340 | + str(sys.hexversion) 341 | + "}" 342 | ) 343 | 344 | 345 | def test_alternate_set_loader(): # type: ignore 346 | import sys 347 | 348 | path = os.path.join(os.path.dirname(__file__), "python_config.py") 349 | 350 | import tempfile 351 | 352 | with tempfile.TemporaryDirectory() as folder: 353 | create_path_from_config(folder, config_from_dict(PATH_DICT), remove_level=0) 354 | entries = [ 355 | DICT2_1, # assumes dict 356 | ("dict", DICT2_2), 357 | ("env", PREFIX), 358 | ("python", path, "CONFIG"), 359 | ("json", JSON), 360 | ("ini", INI), 361 | ("dotenv", DOTENV), 362 | ("path", folder, 0), 363 | ] 364 | if yaml: 365 | entries.append(("yaml", YAML)) 366 | if toml: 367 | entries.append(("toml", TOML)) 368 | cfg = config(*entries, lowercase_keys=True) 369 | 370 | joined_dicts = {k: str(v) for k, v in DICT1.items()} 371 | joined_dicts.update(DICT2_1) 372 | joined_dicts.update(DICT2_2) 373 | joined_dicts.update(DICT3) 374 | joined_dicts.update(DICT_INI) 375 | joined_dicts.update(DICT_DOTENV) 376 | if yaml: 377 | joined_dicts.update(DICT_YAML) 378 | if toml: 379 | joined_dicts.update(DICT_TOML) 380 | joined_dicts.update((k, str(v)) for k, v in PATH_DICT.items()) 381 | joined_dicts["sys.version"] = sys.hexversion 382 | assert ( 383 | config_from_dict(joined_dicts, lowercase_keys=True).as_dict() == cfg.as_dict() 384 | ) 385 | assert config_from_dict(joined_dicts, lowercase_keys=True) == cfg 386 | 387 | 388 | def test_alternate_set_loader_prefix(): # type: ignore 389 | import sys 390 | 391 | path = os.path.join(os.path.dirname(__file__), "python_config.py") 392 | 393 | import tempfile 394 | 395 | with tempfile.TemporaryDirectory() as folder: 396 | create_path_from_config(folder, config_from_dict(PATH_DICT), remove_level=0) 397 | cfg = config( 398 | DICT2_1, # assumes dict 399 | ("dict", DICT2_2), 400 | ("env",), 401 | ("python", path), 402 | ("json", JSON), 403 | ("ini", INI), 404 | ("dotenv", DOTENV), 405 | ("path", folder, 0), 406 | prefix="CONFIG", 407 | lowercase_keys=True, 408 | ) 409 | 410 | joined_dicts = {k: str(v) for k, v in DICT1.items()} 411 | joined_dicts.update(DICT2_1) 412 | joined_dicts.update(DICT2_2) 413 | joined_dicts.update(DICT3) 414 | joined_dicts.update(DICT_INI) 415 | 
joined_dicts.update(DICT_DOTENV) 416 | joined_dicts.update((k, str(v)) for k, v in PATH_DICT.items()) 417 | joined_dicts["sys.version"] = sys.hexversion 418 | assert ( 419 | config_from_dict(joined_dicts, lowercase_keys=True).as_dict() == cfg.as_dict() 420 | ) 421 | assert config_from_dict(joined_dicts, lowercase_keys=True) == cfg 422 | 423 | 424 | def test_alternate_set_loader_strings(): # type: ignore 425 | import sys 426 | 427 | path = str(os.path.join(os.path.dirname(__file__), "python_config.py")) 428 | 429 | import tempfile 430 | 431 | with tempfile.TemporaryDirectory() as folder, tempfile.NamedTemporaryFile( 432 | suffix=".json", 433 | ) as f1, tempfile.NamedTemporaryFile( 434 | suffix=".ini", 435 | ) as f2, tempfile.NamedTemporaryFile( 436 | suffix=".yaml", 437 | ) as f3, tempfile.NamedTemporaryFile( 438 | suffix=".toml", 439 | ) as f4, tempfile.NamedTemporaryFile( 440 | suffix=".env", 441 | ) as f5: 442 | # path 443 | subfolder = folder + "/sub" 444 | os.makedirs(subfolder) 445 | create_path_from_config(subfolder, config_from_dict(PATH_DICT), remove_level=1) 446 | # json 447 | f1.file.write(JSON.encode()) 448 | f1.file.flush() 449 | # ini 450 | f2.file.write(INI.encode()) 451 | f2.file.flush() 452 | # ini 453 | f5.file.write(DOTENV_PREFIX.encode()) 454 | f5.file.flush() 455 | 456 | entries = [ 457 | DICT2_1, # dict 458 | DICT2_2, 459 | "env", 460 | path, # python 461 | f1.name, # json 462 | f2.name, # ini 463 | f5.name, # .env 464 | folder, # path 465 | ] 466 | if yaml: 467 | f3.file.write(YAML.encode()) 468 | f3.file.flush() 469 | entries.append(f3.name) # yaml 470 | if toml: 471 | f4.file.write(TOML.encode()) 472 | f4.file.flush() 473 | entries.append(f4.name) # toml 474 | 475 | cfg = config(*entries, prefix="CONFIG", lowercase_keys=True) 476 | 477 | joined_dicts = {k: str(v) for k, v in DICT1.items()} 478 | joined_dicts.update(DICT2_1) 479 | joined_dicts.update(DICT2_2) 480 | joined_dicts.update(DICT3) 481 | joined_dicts.update(DICT_INI) 482 | joined_dicts.update(DICT_DOTENV) 483 | if yaml: 484 | joined_dicts.update(DICT_YAML) 485 | if toml: 486 | joined_dicts.update(DICT_TOML) 487 | joined_dicts.update((k, str(v)) for k, v in PATH_DICT.items()) 488 | joined_dicts["sys.version"] = sys.hexversion 489 | assert ( 490 | config_from_dict(joined_dicts, lowercase_keys=True).as_dict() == cfg.as_dict() 491 | ) 492 | assert config_from_dict(joined_dicts, lowercase_keys=True) == cfg 493 | 494 | 495 | def test_alternate_set_loader_strings_python_module(): # type: ignore 496 | import sys 497 | 498 | module = "tests.python_config" 499 | 500 | import tempfile 501 | 502 | with tempfile.TemporaryDirectory() as folder, tempfile.NamedTemporaryFile( 503 | suffix=".json", 504 | ) as f1, tempfile.NamedTemporaryFile( 505 | suffix=".ini", 506 | ) as f2, tempfile.NamedTemporaryFile( 507 | suffix=".yaml", 508 | ) as f3, tempfile.NamedTemporaryFile( 509 | suffix=".toml", 510 | ) as f4: 511 | # path 512 | subfolder = folder + "/sub" 513 | os.makedirs(subfolder) 514 | create_path_from_config(subfolder, config_from_dict(PATH_DICT), remove_level=1) 515 | # json 516 | f1.file.write(JSON.encode()) 517 | f1.file.flush() 518 | # ini 519 | f2.file.write(INI.encode()) 520 | f2.file.flush() 521 | 522 | entries = [ 523 | DICT2_1, # dict 524 | DICT2_2, 525 | "env", 526 | module, # python 527 | f1.name, # json 528 | f2.name, # ini 529 | folder, # path 530 | ] 531 | 532 | if yaml: 533 | f3.file.write(YAML.encode()) 534 | f3.file.flush() 535 | entries.append(f3.name) 536 | if toml: 537 | f4.file.write(TOML.encode()) 538 
| f4.file.flush() 539 | entries.append(f4.name) # toml 540 | 541 | cfg = config(*entries, prefix="CONFIG", lowercase_keys=True) 542 | 543 | joined_dicts = {k: str(v) for k, v in DICT1.items()} 544 | joined_dicts.update(DICT2_1) 545 | joined_dicts.update(DICT2_2) 546 | joined_dicts.update(DICT3) 547 | joined_dicts.update(DICT_INI) 548 | if yaml: 549 | joined_dicts.update(DICT_YAML) 550 | if toml: 551 | joined_dicts.update(DICT_TOML) 552 | joined_dicts.update((k, str(v)) for k, v in PATH_DICT.items()) 553 | joined_dicts["sys.version"] = sys.hexversion 554 | assert ( 555 | config_from_dict(joined_dicts, lowercase_keys=True).as_dict() == cfg.as_dict() 556 | ) 557 | assert config_from_dict(joined_dicts, lowercase_keys=True) == cfg 558 | 559 | 560 | def test_alternate_set_loader_fails(): # type: ignore 561 | with pytest.raises( 562 | ValueError, 563 | match="configs should be a non-empty iterable of Configuration objects", 564 | ): 565 | assert config() is Exception 566 | 567 | with pytest.raises(ValueError): 568 | assert config(("no type", "")) is Exception 569 | 570 | with pytest.raises(ValueError): 571 | assert config("no type") is Exception 572 | 573 | with pytest.raises(ValueError): 574 | assert config([]) is Exception 575 | 576 | with pytest.raises(ValueError): 577 | assert config(("python",)) is Exception 578 | 579 | 580 | def test_allow_missing_paths(): # type: ignore 581 | import os 582 | import tempfile 583 | 584 | with tempfile.TemporaryDirectory() as folder: 585 | with pytest.raises(FileNotFoundError): 586 | config(("path", os.path.join(folder, "sub"))) 587 | with pytest.raises(FileNotFoundError): 588 | config(os.path.join(folder, "file.json")) 589 | with pytest.raises(FileNotFoundError): 590 | config(os.path.join(folder, "file.ini")) 591 | with pytest.raises(FileNotFoundError): 592 | config(os.path.join(folder, "file.env")) 593 | with pytest.raises(FileNotFoundError): 594 | config(os.path.join(folder, "module.py")) 595 | with pytest.raises(ModuleNotFoundError): 596 | config(("python", folder)) 597 | if yaml: 598 | with pytest.raises(FileNotFoundError): 599 | config(os.path.join(folder, "file.yaml")) 600 | if toml: 601 | with pytest.raises(FileNotFoundError): 602 | config(os.path.join(folder, "file.toml")) 603 | 604 | entries = [ 605 | "env", 606 | os.path.join(folder, "file.json"), 607 | os.path.join(folder, "file.ini"), 608 | os.path.join(folder, "file.env"), 609 | ("path", os.path.join(folder, "sub")), 610 | os.path.join(folder, "module.py"), 611 | ("python", folder), 612 | ] 613 | if yaml: 614 | entries.append(os.path.join(folder, "file.yaml")) 615 | if toml: 616 | entries.append(os.path.join(folder, "file.toml")) 617 | 618 | config(*entries, ignore_missing_paths=True) 619 | 620 | 621 | def test_allow_missing_paths_individually(): # type: ignore 622 | import os 623 | import tempfile 624 | 625 | with tempfile.TemporaryDirectory() as folder: 626 | with pytest.raises(FileNotFoundError): 627 | config(("path", os.path.join(folder, "sub"))) 628 | with pytest.raises(FileNotFoundError): 629 | config(os.path.join(folder, "file.json")) 630 | with pytest.raises(FileNotFoundError): 631 | config(os.path.join(folder, "file.ini")) 632 | with pytest.raises(FileNotFoundError): 633 | config(os.path.join(folder, "file.env")) 634 | with pytest.raises(FileNotFoundError): 635 | config(os.path.join(folder, "module.py")) 636 | with pytest.raises(ModuleNotFoundError): 637 | config(("python", folder)) 638 | if yaml: 639 | with pytest.raises(FileNotFoundError): 640 | config(os.path.join(folder, 
"file.yaml")) 641 | if toml: 642 | with pytest.raises(FileNotFoundError): 643 | config(os.path.join(folder, "file.toml")) 644 | 645 | cfg = ConfigurationSet( 646 | config_from_json( 647 | os.path.join(folder, "file.json"), 648 | read_from_file=True, 649 | ignore_missing_paths=True, 650 | ), 651 | config_from_ini( 652 | os.path.join(folder, "file.ini"), 653 | read_from_file=True, 654 | ignore_missing_paths=True, 655 | ), 656 | config_from_dotenv( 657 | os.path.join(folder, "file.env"), 658 | read_from_file=True, 659 | ignore_missing_paths=True, 660 | ), 661 | config_from_python( 662 | os.path.join(folder, "module.py"), 663 | ignore_missing_paths=True, 664 | ), 665 | config_from_python(folder, ignore_missing_paths=True), 666 | config_from_path(folder, ignore_missing_paths=True), 667 | config_from_env(prefix=PREFIX), 668 | ) 669 | 670 | assert cfg.as_dict() == config_from_env(prefix=PREFIX) 671 | 672 | if yaml: 673 | from config import config_from_yaml 674 | 675 | assert ( 676 | config_from_yaml( 677 | os.path.join(folder, "file.yaml"), 678 | read_from_file=True, 679 | ignore_missing_paths=True, 680 | ).as_dict() 681 | == {} 682 | ) 683 | 684 | if yaml: 685 | from config import config_from_toml 686 | 687 | assert ( 688 | config_from_toml( 689 | os.path.join(folder, "file.toml"), 690 | read_from_file=True, 691 | ignore_missing_paths=True, 692 | ).as_dict() 693 | == {} 694 | ) 695 | 696 | 697 | def test_dict_methods_items(): # type: ignore 698 | cfg = ConfigurationSet( 699 | config_from_dict(DICT2_1, lowercase_keys=True), 700 | config_from_dict(DICT2_2, lowercase_keys=True), 701 | config_from_env(prefix=PREFIX, lowercase_keys=True), 702 | ) 703 | 704 | assert dict(cfg.items()) == { 705 | "a1": { 706 | "b1.c1": "1", 707 | "b1.c2": "2", 708 | "b1.c3": "3", 709 | "b2.c1": "a", 710 | "b2.c2": "True", 711 | "b2.c3": "1.1", 712 | }, 713 | "a2": { 714 | "b1.c1": "f", 715 | "b1.c2": False, 716 | "b1.c3": None, 717 | "b2.c1": 10, 718 | "b2.c2": "YWJjZGVmZ2g=", 719 | "b2.c3": "abcdefgh", 720 | }, 721 | } 722 | 723 | with cfg.dotted_iter(): 724 | assert dict(cfg.items()) == { 725 | "a2.b2.c2": "YWJjZGVmZ2g=", 726 | "a1.b2.c2": "True", 727 | "a1.b2.c1": "a", 728 | "a1.b1.c2": "2", 729 | "a2.b2.c3": "abcdefgh", 730 | "a2.b1.c1": "f", 731 | "a1.b1.c3": "3", 732 | "a2.b1.c2": False, 733 | "a2.b1.c3": None, 734 | "a1.b1.c1": "1", 735 | "a2.b2.c1": 10, 736 | "a1.b2.c3": "1.1", 737 | } 738 | 739 | 740 | def test_dict_methods_keys_values(): # type: ignore 741 | cfg = ConfigurationSet( 742 | config_from_dict(DICT2_1, lowercase_keys=True), 743 | config_from_dict(DICT2_2, lowercase_keys=True), 744 | config_from_env(prefix=PREFIX, lowercase_keys=True), 745 | ) 746 | 747 | assert sorted(cfg.keys()) == [ 748 | "a1", 749 | "a2", 750 | ] 751 | 752 | assert dict(zip(cfg.keys(), cfg.values())) == { 753 | "a1": { 754 | "b1.c1": "1", 755 | "b1.c2": "2", 756 | "b1.c3": "3", 757 | "b2.c1": "a", 758 | "b2.c2": "True", 759 | "b2.c3": "1.1", 760 | }, 761 | "a2": { 762 | "b1.c1": "f", 763 | "b1.c2": False, 764 | "b1.c3": None, 765 | "b2.c1": 10, 766 | "b2.c2": "YWJjZGVmZ2g=", 767 | "b2.c3": "abcdefgh", 768 | }, 769 | } 770 | 771 | with cfg.dotted_iter(): 772 | assert sorted(cfg.keys()) == [ 773 | "a1.b1.c1", 774 | "a1.b1.c2", 775 | "a1.b1.c3", 776 | "a1.b2.c1", 777 | "a1.b2.c2", 778 | "a1.b2.c3", 779 | "a2.b1.c1", 780 | "a2.b1.c2", 781 | "a2.b1.c3", 782 | "a2.b2.c1", 783 | "a2.b2.c2", 784 | "a2.b2.c3", 785 | ] 786 | 787 | assert dict(zip(cfg.keys(), cfg.values())) == cfg.as_dict() 788 | 789 | 790 | def test_reload(): # type: 
ignore 791 | import sys 792 | 793 | path = str(os.path.join(os.path.dirname(__file__), "python_config.py")) 794 | 795 | import tempfile 796 | 797 | with tempfile.TemporaryDirectory() as folder, tempfile.NamedTemporaryFile( 798 | suffix=".json", 799 | ) as f1, tempfile.NamedTemporaryFile( 800 | suffix=".ini", 801 | ) as f2, tempfile.NamedTemporaryFile( 802 | suffix=".yaml", 803 | ) as f3, tempfile.NamedTemporaryFile( 804 | suffix=".toml", 805 | ) as f4, tempfile.NamedTemporaryFile( 806 | suffix=".env", 807 | ) as f5: 808 | # path 809 | subfolder = folder + "/sub" 810 | os.makedirs(subfolder) 811 | create_path_from_config(subfolder, config_from_dict(PATH_DICT), remove_level=1) 812 | # json 813 | f1.file.write(JSON.encode()) 814 | f1.file.flush() 815 | # ini 816 | f2.file.write(INI.encode()) 817 | f2.file.flush() 818 | # ini 819 | f5.file.write(DOTENV_PREFIX.encode()) 820 | f5.file.flush() 821 | 822 | entries = [ 823 | DICT2_1, # dict 824 | DICT2_2, 825 | "env", 826 | path, # python 827 | f1.name, # json 828 | f2.name, # ini 829 | f5.name, # .env 830 | folder, # path 831 | ] 832 | if yaml: 833 | f3.file.write(YAML.encode()) 834 | f3.file.flush() 835 | entries.append(f3.name) # yaml 836 | if toml: 837 | f4.file.write(TOML.encode()) 838 | f4.file.flush() 839 | entries.append(f4.name) # toml 840 | 841 | cfg = config(*entries, prefix="CONFIG", lowercase_keys=True) 842 | 843 | joined_dicts = {k: str(v) for k, v in DICT1.items()} 844 | joined_dicts.update(DICT2_1) 845 | joined_dicts.update(DICT2_2) 846 | joined_dicts.update(DICT3) 847 | joined_dicts.update(DICT_INI) 848 | joined_dicts.update(DICT_DOTENV) 849 | if yaml: 850 | joined_dicts.update(DICT_YAML) 851 | if toml: 852 | joined_dicts.update(DICT_TOML) 853 | joined_dicts.update((k, str(v)) for k, v in PATH_DICT.items()) 854 | joined_dicts["sys.version"] = sys.hexversion 855 | assert ( 856 | config_from_dict(joined_dicts, lowercase_keys=True).as_dict() 857 | == cfg.as_dict() 858 | ) 859 | assert config_from_dict(joined_dicts, lowercase_keys=True) == cfg 860 | 861 | # json 862 | f1.file.seek(0) 863 | f1.file.truncate(0) 864 | f1.file.write(JSON2.encode()) 865 | f1.file.flush() 866 | 867 | cfg.reload() 868 | assert cfg["a3.b1.c1"] == "afsdf" 869 | 870 | 871 | def test_configs(): # type: ignore 872 | # readable configs 873 | cfg = ConfigurationSet( 874 | config_from_dict(DICT2_1, lowercase_keys=True), 875 | config_from_dict(DICT2_2, lowercase_keys=True), 876 | config_from_env(prefix=PREFIX, lowercase_keys=True), 877 | ) 878 | 879 | assert cfg.configs[0] == config_from_dict(DICT2_1, lowercase_keys=True) 880 | cfg.configs = cfg.configs[1:] 881 | assert cfg.configs[0] == config_from_dict(DICT2_2, lowercase_keys=True) 882 | 883 | # writable configs 884 | cfg = ConfigurationSet( 885 | config_from_dict(DICT2_1, lowercase_keys=True), 886 | config_from_dict(DICT2_2, lowercase_keys=True), 887 | config_from_env(prefix=PREFIX, lowercase_keys=True), 888 | ) 889 | cfg.update({"abc": "xyz"}) 890 | 891 | assert cfg.configs[0] == config_from_dict(DICT2_1, lowercase_keys=True) 892 | cfg.configs = cfg.configs[1:] 893 | assert cfg.configs[0] == config_from_dict(DICT2_2, lowercase_keys=True) 894 | 895 | 896 | def test_separator(): # type: ignore 897 | import sys 898 | import tempfile 899 | 900 | path = os.path.join(os.path.dirname(__file__), "python_config_2.py") 901 | with tempfile.TemporaryDirectory() as folder: 902 | create_path_from_config(folder, config_from_dict(PATH_DICT), remove_level=0) 903 | entries = [ 904 | ("env", PREFIX), 905 | ("python", path, "CONFIG", 
"__"), 906 | ] 907 | cfg = config(*entries, lowercase_keys=True) 908 | 909 | joined_dicts = {k: str(v) for k, v in DICT1.items()} 910 | joined_dicts.update(DICT2_1) 911 | joined_dicts.update(DICT2_2) 912 | joined_dicts["sys.version"] = sys.hexversion 913 | assert ( 914 | config_from_dict(joined_dicts, lowercase_keys=True).as_dict() == cfg.as_dict() 915 | ) 916 | assert config_from_dict(joined_dicts, lowercase_keys=True) == cfg 917 | 918 | 919 | def test_separator_override_default(): # type: ignore 920 | import sys 921 | import tempfile 922 | 923 | path = os.path.join(os.path.dirname(__file__), "python_config.py") 924 | with tempfile.TemporaryDirectory() as folder: 925 | create_path_from_config(folder, config_from_dict(PATH_DICT), remove_level=0) 926 | entries = [ 927 | ("env", PREFIX, "__"), 928 | ("python", path, "CONFIG"), 929 | ] 930 | cfg = config(*entries, separator="_", lowercase_keys=True) 931 | 932 | joined_dicts = {k: str(v) for k, v in DICT1.items()} 933 | joined_dicts.update(DICT2_1) 934 | joined_dicts.update(DICT2_2) 935 | joined_dicts["sys.version"] = sys.hexversion 936 | assert ( 937 | config_from_dict(joined_dicts, lowercase_keys=True).as_dict() == cfg.as_dict() 938 | ) 939 | assert config_from_dict(joined_dicts, lowercase_keys=True) == cfg 940 | 941 | 942 | def test_same_as_configuration(): # type: ignore 943 | cfg = config_from_dict(DICT2_1, lowercase_keys=True) 944 | 945 | cfgset = ConfigurationSet(config_from_dict(DICT2_1, lowercase_keys=True)) 946 | 947 | assert cfg.get_dict("a2") == cfgset.get_dict("a2") 948 | assert cfg.a2.as_dict() == cfgset.a2.as_dict() 949 | assert dict(cfg.a2) == dict(cfgset.a2) 950 | 951 | assert dict(cfg.a2) == dict(cfg.a2.items()) 952 | assert dict(cfgset.a2) == dict(cfgset.a2.items()) 953 | 954 | assert cfg.as_dict() == cfgset.as_dict() 955 | assert dict(cfg) == dict(cfgset) 956 | 957 | 958 | def test_merging_values(): # type: ignore 959 | dict5_1 = {"a5.b1.c2": 3} 960 | dict5_2 = {"a5.b1.c1": 1, "a5.b1.c2": 2} 961 | 962 | cfg = ConfigurationSet( 963 | config_from_dict(dict5_1), 964 | config_from_dict(dict5_2), 965 | ) 966 | 967 | assert cfg["a5.b1"] == {"c1": 1, "c2": 3} 968 | assert cfg.a5.b1 == {"c1": 1, "c2": 3} 969 | -------------------------------------------------------------------------------- /tests/test_datatypes.py: -------------------------------------------------------------------------------- 1 | """Tests for data types.""" 2 | 3 | # ruff: noqa: D103,E501 4 | 5 | from config import config, config_from_dict 6 | from config.helpers import AttributeDict 7 | from pytest import raises 8 | 9 | 10 | def test_list_1(): # type: ignore 11 | definitions = {"my.var": ["hello"], "var": ["1", "2"]} 12 | 13 | cfg = config_from_dict(definitions, interpolate=True) 14 | cfg.var.insert(0, "0") 15 | assert cfg.var == ["1", "2"] 16 | cfg.my.var.insert(0, "hello again") 17 | assert cfg.my.var == ["hello"] 18 | 19 | 20 | def test_list_2(): # type: ignore 21 | definitions = {"var": ["1", "2"]} 22 | definitions2 = {"my.var": ["hello"]} 23 | 24 | cfg = config(definitions, definitions2, interpolate=True) 25 | cfg.var.insert(0, "0") 26 | assert cfg.var == ["1", "2"] 27 | cfg.my.var.insert(0, "hello again") 28 | assert cfg.my.var == ["hello"] 29 | 30 | 31 | def test_list_3(): # type: ignore 32 | definitions = {"my.var": ["hello"], "var": ["1", "2"]} 33 | 34 | cfg = config_from_dict(definitions, interpolate=True) 35 | assert cfg.my.as_dict() == {"var": ["hello"]} 36 | cfg.my.as_dict()["var"].insert(0, "hello again") 37 | assert cfg.my.as_dict() == {"var": 
["hello"]} 38 | 39 | 40 | def test_attribute_dict_1(): # type: ignore 41 | definitions = { 42 | "my.var": ["hello"], 43 | "var": ["1", "2"], 44 | "var2": {"a": {"c": 1, "d": 10}, "b": 2}, 45 | } 46 | 47 | cfg = config_from_dict(definitions, interpolate=True) 48 | d = cfg.as_attrdict() 49 | 50 | assert isinstance(d, dict) 51 | assert isinstance(d, AttributeDict) 52 | assert d.var == ["1", "2"] 53 | assert d.my.var == ["hello"] 54 | assert d.var2.a == {"c": 1, "d": 10} 55 | 56 | with raises(AttributeError): 57 | assert d.var3 58 | 59 | d.var3 = "abc" 60 | assert d.var3 == "abc" 61 | 62 | 63 | def test_tuple(): # type: ignore 64 | d = {"a1": (1, 2)} 65 | cfg = config_from_dict(d, interpolate=True) 66 | assert cfg["a1"] == (1, 2) 67 | -------------------------------------------------------------------------------- /tests/test_dict_like.py: -------------------------------------------------------------------------------- 1 | """Tests configs for dictionary-like objects.""" 2 | 3 | # ruff: noqa: D103,E501 4 | 5 | from config import ConfigurationSet, config_from_dict 6 | from pytest import raises 7 | 8 | DICT = { 9 | "a1.B1.c1": 1, 10 | "a1.b1.C2": 2, 11 | "A1.b1.c3": 3, 12 | "a1.b2.c1": "a", 13 | "a1.b2.c2": True, 14 | "a1.b2.c3": 1.1, 15 | "a2.b1.c1": "f", 16 | "a2.b1.c2": False, 17 | "a2.b1.c3": None, 18 | "a2.b2.c1": 10, 19 | "a2.b2.c2": "YWJjZGVmZ2g=", 20 | "a2.b2.c3": "abcdefgh", 21 | } 22 | 23 | NESTED = { 24 | "a1": { 25 | "b1": {"c1": 10, "C2": 20, "c3": 30}, 26 | "b2": {"c1": "a0", "c2": False, "c3": 10.1}, 27 | }, 28 | } 29 | 30 | PROTECTED = { 31 | "important_password": "abc", 32 | "very_secret": "SeCReT", 33 | "clear_text": "abc", 34 | "url": "protocol://user:pass@hostname:port/path", 35 | "url2": "protocol://user@hostname:port/path", 36 | } 37 | 38 | 39 | def test_list(): # type: ignore 40 | cfg = config_from_dict(DICT, lowercase_keys=False) 41 | assert sorted(cfg) == ["A1", "a1", "a2"] 42 | assert list(cfg) == list(reversed(cfg))[::-1] 43 | 44 | with cfg.dotted_iter(): 45 | assert sorted(cfg) == sorted(DICT.keys()) 46 | assert list(cfg) == list(reversed(cfg))[::-1] 47 | 48 | 49 | def test_len(): # type: ignore 50 | cfg = config_from_dict(DICT, lowercase_keys=False) 51 | assert len(cfg) == 3 52 | with cfg.dotted_iter(): 53 | assert len(cfg) == len(DICT) 54 | 55 | 56 | def test_setitem(): # type: ignore 57 | cfg = config_from_dict(DICT, lowercase_keys=True) 58 | 59 | assert len(cfg) == 2 60 | assert cfg["a1.b2.c1"] == "a" 61 | 62 | cfg["a1.b2.c1"] = 89 63 | assert len(cfg) == 2 64 | assert cfg["a1.b2.c1"] == 89 65 | 66 | cfg["a1.b2.c4"] = True 67 | assert len(cfg) == 2 68 | assert cfg["a1.b2.c1"] == 89 69 | assert cfg["a1.b2.c4"] is True 70 | 71 | cfg["a3"] = {"b1": 10, "b2": "test"} 72 | assert len(cfg) == 3 73 | assert cfg["a3.b1"] == 10 74 | assert cfg["a3.b2"] == "test" 75 | 76 | cfg = config_from_dict(DICT, lowercase_keys=True) 77 | 78 | with cfg.dotted_iter(): 79 | assert len(cfg) == 12 80 | assert cfg["a1.b2.c1"] == "a" 81 | 82 | cfg["a1.b2.c1"] = 89 83 | assert len(cfg) == 12 84 | assert cfg["a1.b2.c1"] == 89 85 | 86 | cfg["a1.b2.c4"] = True 87 | assert len(cfg) == 13 88 | assert cfg["a1.b2.c1"] == 89 89 | assert cfg["a1.b2.c4"] is True 90 | 91 | cfg["a3"] = {"b1": 10, "b2": "test"} 92 | assert len(cfg) == 15 93 | assert cfg["a3.b1"] == 10 94 | assert cfg["a3.b2"] == "test" 95 | 96 | 97 | def test_update(): # type: ignore 98 | cfg = config_from_dict(DICT, lowercase_keys=True) 99 | 100 | assert len(cfg) == 2 101 | assert cfg["a1.b2.c1"] == "a" 102 | 103 | cfg.update(PROTECTED) 
104 | assert len(cfg) == 7 105 | 106 | cfg.update(NESTED) 107 | 108 | assert len(cfg) == 7 109 | assert cfg["a1.b2.c1"] == "a0" 110 | 111 | cfg = config_from_dict(DICT, lowercase_keys=True) 112 | 113 | with cfg.dotted_iter(): 114 | assert len(cfg) == 12 115 | assert cfg["a1.b2.c1"] == "a" 116 | 117 | cfg.update(PROTECTED) 118 | assert len(cfg) == 17 119 | 120 | cfg.update(NESTED) 121 | 122 | assert len(cfg) == 17 123 | assert cfg["a1.b2.c1"] == "a0" 124 | 125 | 126 | def test_delitem(): # type: ignore 127 | cfg = config_from_dict(DICT, lowercase_keys=True) 128 | 129 | assert len(cfg) == 2 130 | 131 | del cfg["a1.b1"] 132 | assert len(cfg) == 2 133 | 134 | with raises(KeyError): 135 | del cfg["z"] 136 | 137 | cfg = config_from_dict(DICT, lowercase_keys=True) 138 | 139 | with cfg.dotted_iter(): 140 | assert len(cfg) == 12 141 | 142 | del cfg["a1.b1"] 143 | assert len(cfg) == 9 144 | 145 | with raises(KeyError): 146 | del cfg["z"] 147 | 148 | 149 | def test_in(): # type: ignore 150 | cfg = config_from_dict(DICT, lowercase_keys=True) 151 | 152 | assert "x" not in cfg 153 | assert "a1" in cfg 154 | assert "a1.b2" in cfg 155 | assert "a1.b2.c3" in cfg 156 | 157 | 158 | def test_clear(): # type: ignore 159 | cfg = config_from_dict(DICT, lowercase_keys=True) 160 | assert len(cfg) == 2 161 | with cfg.dotted_iter(): 162 | assert len(cfg) == 12 163 | 164 | cfg.clear() 165 | assert len(cfg) == 0 166 | with cfg.dotted_iter(): 167 | assert len(cfg) == 0 168 | 169 | 170 | def test_copy(): # type: ignore 171 | cfg = config_from_dict(DICT, lowercase_keys=True) 172 | assert len(cfg) == 2 173 | with cfg.dotted_iter(): 174 | assert len(cfg) == 12 175 | 176 | cfg2 = cfg.copy() 177 | 178 | assert cfg == cfg2 179 | 180 | 181 | def test_pop(): # type: ignore 182 | cfg = config_from_dict(DICT, lowercase_keys=True) 183 | assert len(cfg) == 2 184 | with cfg.dotted_iter(): 185 | assert len(cfg) == 12 186 | 187 | assert cfg.pop("a2.b1.c1") == "f" 188 | assert cfg.pop("a2.b1.c1", "something") == "something" 189 | with raises(KeyError): 190 | cfg.pop("a2.b1.c1") 191 | 192 | 193 | def test_setdefault(): # type: ignore 194 | cfg = config_from_dict(DICT, lowercase_keys=False) 195 | 196 | # no changes 197 | assert cfg.setdefault("a2.b1.c1") == "f" 198 | assert len(cfg) == 3 199 | assert sorted(cfg) == sorted(["A1", "a1", "a2"]) 200 | 201 | # add key 202 | assert cfg.setdefault("a2.b1.c7") is None 203 | assert len(cfg) == 3 204 | 205 | # add key with default 206 | assert cfg.setdefault("a2.b1.c8", "some value") == "some value" 207 | assert len(cfg) == 3 208 | assert cfg["a2.b1.c8"] == "some value" 209 | 210 | cfg = config_from_dict(DICT, lowercase_keys=False) 211 | 212 | with cfg.dotted_iter(): 213 | # no changes 214 | assert cfg.setdefault("a2.b1.c1") == "f" 215 | assert len(cfg) == 12 216 | assert sorted(cfg) == sorted(DICT.keys()) 217 | 218 | # add key 219 | assert cfg.setdefault("a2.b1.c7") is None 220 | assert len(cfg) == 13 221 | 222 | # add key with default 223 | assert cfg.setdefault("a2.b1.c8", "some value") == "some value" 224 | assert len(cfg) == 14 225 | assert cfg["a2.b1.c8"] == "some value" 226 | 227 | 228 | def test_configset_list(): # type: ignore 229 | cfg = ConfigurationSet( 230 | config_from_dict(DICT, lowercase_keys=False), 231 | config_from_dict(PROTECTED, lowercase_keys=False), 232 | ) 233 | 234 | assert sorted(cfg) == sorted(["A1", "a1", "a2"] + list(PROTECTED.keys())) 235 | with cfg.dotted_iter(): 236 | assert sorted(cfg) == sorted(list(DICT.keys()) + list(PROTECTED.keys())) 237 | assert list(cfg) == 
list(reversed(cfg))[::-1] 238 | 239 | 240 | def test_configset_len(): # type: ignore 241 | cfg = ConfigurationSet( 242 | config_from_dict(DICT, lowercase_keys=False), 243 | config_from_dict(PROTECTED, lowercase_keys=False), 244 | ) 245 | assert len(cfg) == 8 246 | with cfg.dotted_iter(): 247 | assert len(cfg) == len(DICT) + len(PROTECTED) 248 | 249 | 250 | def test_configset_setitem(): # type: ignore 251 | cfg = ConfigurationSet( 252 | config_from_dict(DICT, lowercase_keys=False), 253 | config_from_dict(PROTECTED, lowercase_keys=False), 254 | ) 255 | 256 | with cfg.dotted_iter(): 257 | assert len(cfg) == 17 258 | assert cfg["a1.b2.c1"] == "a" 259 | 260 | cfg["a1.b2.c1"] = 89 261 | with cfg.dotted_iter(): 262 | assert len(cfg) == 17 263 | assert cfg["a1.b2.c1"] == 89 264 | 265 | cfg["a1.b2.c4"] = True 266 | with cfg.dotted_iter(): 267 | assert len(cfg) == 18 268 | assert cfg["a1.b2.c1"] == 89 269 | assert cfg["a1.b2.c4"] is True 270 | 271 | cfg["a3"] = {"b1": 10, "b2": "test"} 272 | with cfg.dotted_iter(): 273 | assert len(cfg) == 20 274 | assert cfg["a3.b1"] == 10 275 | assert cfg["a3.b2"] == "test" 276 | 277 | 278 | def test_configset_update(): # type: ignore 279 | cfg = ConfigurationSet( 280 | config_from_dict(DICT, lowercase_keys=True), 281 | config_from_dict(PROTECTED, lowercase_keys=True), 282 | ) 283 | 284 | with cfg.dotted_iter(): 285 | assert len(cfg) == 17 286 | assert cfg["a1.b2.c1"] == "a" 287 | 288 | cfg.update(NESTED) 289 | with cfg.dotted_iter(): 290 | assert len(cfg) == 17 291 | assert cfg["a1.b2.c1"] == "a0" 292 | 293 | cfg.update({"important_password_2": "abc"}) 294 | with cfg.dotted_iter(): 295 | assert len(cfg) == 18 296 | 297 | 298 | def test_configset_delitem(): # type: ignore 299 | cfg = ConfigurationSet( 300 | config_from_dict(DICT, lowercase_keys=True), 301 | config_from_dict(PROTECTED, lowercase_keys=True), 302 | ) 303 | 304 | with cfg.dotted_iter(): 305 | assert len(cfg) == 17 306 | 307 | del cfg["a1.b1"] 308 | with cfg.dotted_iter(): 309 | assert len(cfg) == 14 310 | 311 | with raises(KeyError): 312 | del cfg["z"] 313 | 314 | 315 | def test_configset_in(): # type: ignore 316 | cfg = ConfigurationSet( 317 | config_from_dict(DICT, lowercase_keys=False), 318 | config_from_dict(PROTECTED, lowercase_keys=False), 319 | ) 320 | 321 | assert "x" not in cfg 322 | assert "a1" in cfg 323 | assert "a1.b2" in cfg 324 | assert "a1.b2.c3" in cfg 325 | 326 | 327 | def test_configset_clear(): # type: ignore 328 | cfg = ConfigurationSet( 329 | config_from_dict(DICT, lowercase_keys=False), 330 | config_from_dict(PROTECTED, lowercase_keys=False), 331 | ) 332 | with cfg.dotted_iter(): 333 | assert len(cfg) == 17 334 | 335 | cfg.clear() 336 | assert len(cfg) == 0 337 | with cfg.dotted_iter(): 338 | assert len(cfg) == 0 339 | 340 | 341 | def test_configset_copy(): # type: ignore 342 | cfg = ConfigurationSet( 343 | config_from_dict(DICT, lowercase_keys=True), 344 | config_from_dict(PROTECTED, lowercase_keys=True), 345 | ) 346 | with cfg.dotted_iter(): 347 | assert len(cfg) == 17 348 | 349 | cfg2 = cfg.copy() 350 | assert cfg == cfg2 351 | 352 | 353 | def test_configset_pop(): # type: ignore 354 | cfg = ConfigurationSet( 355 | config_from_dict(DICT, lowercase_keys=True), 356 | config_from_dict(PROTECTED, lowercase_keys=True), 357 | ) 358 | 359 | with cfg.dotted_iter(): 360 | assert len(cfg) == 17 361 | 362 | assert cfg.pop("a2.b1.c1") == "f" 363 | assert cfg.pop("a2.b1.c1", "something") == "something" 364 | with raises(KeyError): 365 | cfg.pop("a2.b1.c1") 366 | 367 | with 
cfg.dotted_iter(): 368 | assert len(cfg) == 16 369 | 370 | 371 | def test_configset_setdefault(): # type: ignore 372 | cfg = ConfigurationSet( 373 | config_from_dict(DICT, lowercase_keys=False), 374 | config_from_dict(PROTECTED, lowercase_keys=False), 375 | ) 376 | 377 | # no changes 378 | assert cfg.setdefault("a2.b1.c1") == "f" 379 | assert len(cfg) == 8 380 | with cfg.dotted_iter(): 381 | assert len(cfg) == 17 382 | assert sorted(cfg) == sorted(list(DICT.keys()) + list(PROTECTED.keys())) 383 | 384 | # add key 385 | assert cfg.setdefault("a2.b1.c7") is None 386 | assert len(cfg) == 8 387 | with cfg.dotted_iter(): 388 | assert len(cfg) == 18 389 | 390 | # add key with default 391 | assert cfg.setdefault("a2.b1.c8", "some value") == "some value" 392 | assert len(cfg) == 8 393 | with cfg.dotted_iter(): 394 | assert len(cfg) == 19 395 | assert cfg["a2.b1.c8"] == "some value" 396 | -------------------------------------------------------------------------------- /tests/test_dotenv.py: -------------------------------------------------------------------------------- 1 | """Tests for dotenv files.""" 2 | 3 | # ruff: noqa: D103,E501,SIM115 4 | 5 | import tempfile 6 | 7 | import pytest 8 | from config import config, config_from_dict, config_from_dotenv 9 | 10 | DOTENV = """ 11 | KEY1 = abc 12 | KEY2 = def 13 | KEY3 = 1.1 14 | """ 15 | 16 | DOTENV_WITH_PREFIXES = """ 17 | PREFIX__KEY1 = abc 18 | PREFIX__KEY2 = def 19 | PREFIX__KEY3 = 1.1 20 | NOTPREFIX__KEY = 2 21 | PREFIX__KEY4__A = 1 22 | PREFIX__KEY4__B = 2 23 | PREFIX__KEY4__C = 3 24 | """ 25 | 26 | DOTENV_WITH_COMMENTS = """ 27 | # key 1 28 | KEY1 = abc 29 | # key 2 30 | KEY2 = def 31 | # key 3 32 | KEY3 = 1.1 33 | """ 34 | 35 | DICT = { 36 | "key1": "abc", 37 | "key2": "def", 38 | "key3": "1.1", 39 | } 40 | 41 | DICT_WITH_PREFIXES = { 42 | "key1": "abc", 43 | "key2": "def", 44 | "key3": "1.1", 45 | "key4": { 46 | "a": "1", 47 | "b": "2", 48 | "c": "3", 49 | }, 50 | } 51 | 52 | 53 | def test_load_dotenv(): # type: ignore 54 | cfg = config_from_dotenv(DOTENV, lowercase_keys=True) 55 | assert cfg == config_from_dict({k: str(v) for k, v in DICT.items()}) 56 | 57 | 58 | def test_load_dotenv_file(): # type: ignore 59 | with tempfile.NamedTemporaryFile() as f: 60 | f.file.write(DOTENV.encode()) 61 | f.file.flush() 62 | cfg = config_from_dotenv( 63 | open(f.name, "rt"), 64 | read_from_file=True, 65 | lowercase_keys=True, 66 | ) 67 | assert cfg == config_from_dict({k: str(v) for k, v in DICT.items()}) 68 | 69 | 70 | def test_load_dotenv_filename(): # type: ignore 71 | with tempfile.NamedTemporaryFile() as f: 72 | f.file.write(DOTENV.encode()) 73 | f.file.flush() 74 | cfg = config_from_dotenv(f.name, read_from_file=True, lowercase_keys=True) 75 | assert cfg == config_from_dict({k: str(v) for k, v in DICT.items()}) 76 | 77 | 78 | def test_load_dotenv_config(): # type: ignore 79 | with tempfile.NamedTemporaryFile(suffix=".env") as f: 80 | f.file.write(DOTENV_WITH_PREFIXES.encode()) 81 | f.file.flush() 82 | cfg = config(f.name, lowercase_keys=True, prefix="PREFIX") 83 | 84 | assert cfg == config_from_dict(DICT_WITH_PREFIXES) 85 | 86 | 87 | def test_reload(): # type: ignore 88 | with tempfile.NamedTemporaryFile() as f: 89 | f.file.write(DOTENV.encode()) 90 | f.file.flush() 91 | cfg = config_from_dotenv( 92 | open(f.name, "rt"), 93 | read_from_file=True, 94 | lowercase_keys=True, 95 | ) 96 | 97 | assert cfg == config_from_dict({k: str(v) for k, v in DICT.items()}) 98 | 99 | f.file.write(b"\nkey10 = 1\n") 100 | f.file.flush() 101 | cfg = config_from_dotenv( 
102 | open(f.name, "rt"), 103 | read_from_file=True, 104 | lowercase_keys=True, 105 | ) 106 | cfg2 = config_from_dict({k: str(v) for k, v in DICT.items()}) 107 | cfg2["key10"] = "1" 108 | assert cfg == cfg2 109 | 110 | 111 | def test_load_dotenv_2(): # type: ignore 112 | cfg = config_from_dotenv(DOTENV_WITH_PREFIXES, lowercase_keys=True, prefix="PREFIX") 113 | assert cfg == config_from_dict(DICT_WITH_PREFIXES) 114 | 115 | 116 | def test_load_dotenv_comments(): # type: ignore 117 | cfg = config_from_dotenv(DOTENV_WITH_COMMENTS, lowercase_keys=True) 118 | assert cfg == config_from_dict({k: str(v) for k, v in DICT.items()}) 119 | 120 | 121 | def test_load_dotenv_comments_invalid(): # type: ignore 122 | invalid = """ 123 | # key 1 124 | VALID=1 125 | ## key2 126 | INVALID 127 | """ 128 | with pytest.raises(ValueError) as err: 129 | config_from_dotenv(invalid, lowercase_keys=True) 130 | assert "Invalid line INVALID" in str(err) 131 | -------------------------------------------------------------------------------- /tests/test_env.py: -------------------------------------------------------------------------------- 1 | """Tests for environments.""" 2 | 3 | # ruff: noqa: D103,E501,SIM115 4 | 5 | import os 6 | 7 | from config import config_from_dict, config_from_env 8 | 9 | DICT = { 10 | "a1.b1.c1": 1, 11 | "a1.b1.c2": 2, 12 | "a1.b1.c3": 3, 13 | "a1.b2.c1": "a", 14 | "a1.b2.c2": True, 15 | "a1.b2.c3": 1.1, 16 | "a2.b1.c1": "f", 17 | "a2.b1.c2": False, 18 | "a2.b1.c3": None, 19 | "a2.b2.c1": 10, 20 | "a2.b2.c2": "YWJjZGVmZ2g=", 21 | "a2.b2.c3": "abcdefgh", 22 | } 23 | 24 | PREFIX = "PYTHONCONFIG" 25 | 26 | 27 | def test_load_env(): # type: ignore 28 | os.environ.update( 29 | (PREFIX + "__" + k.replace(".", "__").upper(), str(v)) for k, v in DICT.items() 30 | ) 31 | 32 | cfg = config_from_env(PREFIX, lowercase_keys=True) 33 | assert cfg["a1.b1.c1"] == "1" 34 | assert cfg["a1.b1"].get_int("c1") == 1 35 | assert cfg["a1.b1"].as_dict() == {"c1": "1", "c2": "2", "c3": "3"} 36 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": "True", "c3": "1.1"} 37 | 38 | 39 | def test_equality(): # type: ignore 40 | os.environ.update( 41 | (PREFIX + "__" + k.replace(".", "__").upper(), str(v)) for k, v in DICT.items() 42 | ) 43 | 44 | cfg = config_from_env(PREFIX, lowercase_keys=True) 45 | assert cfg == config_from_dict({k: str(v) for k, v in DICT.items()}) 46 | 47 | 48 | def test_reload(): # type: ignore 49 | os.environ.update( 50 | (PREFIX + "__" + k.replace(".", "__").upper(), str(v)) for k, v in DICT.items() 51 | ) 52 | 53 | cfg = config_from_env(PREFIX, lowercase_keys=True) 54 | assert cfg == config_from_dict({k: str(v) for k, v in DICT.items()}) 55 | 56 | os.environ[PREFIX + "__" + "A2__B2__C3"] = "updated" 57 | assert cfg == config_from_dict({k: str(v) for k, v in DICT.items()}) 58 | cfg.reload() 59 | d = DICT.copy() 60 | d["a2.b2.c3"] = "updated" 61 | assert cfg == config_from_dict({k: str(v) for k, v in d.items()}) 62 | 63 | 64 | def test_reload_2(): # type: ignore 65 | os.environ.update( 66 | (PREFIX + "__" + k.replace(".", "__").upper(), str(v)) for k, v in DICT.items() 67 | ) 68 | 69 | cfg = config_from_env(PREFIX, lowercase_keys=True, strip_prefix=False) 70 | assert cfg == config_from_dict( 71 | {PREFIX.lower() + "." + k: str(v) for k, v in DICT.items()}, 72 | ) 73 | 74 | os.environ[PREFIX + "__" + "A2__B2__C3"] = "updated" 75 | assert cfg == config_from_dict( 76 | {PREFIX.lower() + "." 
+ k: str(v) for k, v in DICT.items()}, 77 | ) 78 | cfg.reload() 79 | d = DICT.copy() 80 | d["a2.b2.c3"] = "updated" 81 | assert cfg == config_from_dict( 82 | {PREFIX.lower() + "." + k: str(v) for k, v in d.items()}, 83 | ) 84 | -------------------------------------------------------------------------------- /tests/test_ini.py: -------------------------------------------------------------------------------- 1 | """Tests for ini files.""" 2 | 3 | # ruff: noqa: D103,E501,SIM115 4 | 5 | import tempfile 6 | 7 | from config import config_from_dict, config_from_ini 8 | 9 | INI = """ 10 | [section1] 11 | key1 = True 12 | 13 | [section2] 14 | key1 = abc 15 | key2 = def 16 | key3 = 1.1 17 | 18 | [section3] 19 | key1 = 1 20 | key2 = 0 21 | """ 22 | 23 | DICT = { 24 | "section1.key1": True, 25 | "section2.key1": "abc", 26 | "section2.key2": "def", 27 | "section2.key3": 1.1, 28 | "section3.key1": 1, 29 | "section3.key2": 0, 30 | } 31 | 32 | 33 | def test_load_ini(): # type: ignore 34 | cfg = config_from_ini(INI) 35 | assert cfg == config_from_dict({k: str(v) for k, v in DICT.items()}) 36 | 37 | 38 | def test_load_ini_file(): # type: ignore 39 | with tempfile.NamedTemporaryFile() as f: 40 | f.file.write(INI.encode()) 41 | f.file.flush() 42 | cfg = config_from_ini(open(f.name, "rt"), read_from_file=True) 43 | 44 | assert cfg == config_from_dict({k: str(v) for k, v in DICT.items()}) 45 | 46 | 47 | def test_load_ini_filename(): # type: ignore 48 | with tempfile.NamedTemporaryFile() as f: 49 | f.file.write(INI.encode()) 50 | f.file.flush() 51 | cfg = config_from_ini(f.name, read_from_file=True) 52 | 53 | assert cfg == config_from_dict({k: str(v) for k, v in DICT.items()}) 54 | 55 | 56 | def test_reload(): # type: ignore 57 | with tempfile.NamedTemporaryFile() as f: 58 | f.file.write(INI.encode()) 59 | f.file.flush() 60 | cfg = config_from_ini(open(f.name, "rt"), read_from_file=True) 61 | 62 | assert cfg == config_from_dict({k: str(v) for k, v in DICT.items()}) 63 | 64 | f.file.write(b"\n[section4]\nkey10 = 1\n") 65 | f.file.flush() 66 | cfg = config_from_ini(open(f.name, "rt"), read_from_file=True) 67 | cfg2 = config_from_dict({k: str(v) for k, v in DICT.items()}) 68 | cfg2["section4.key10"] = "1" 69 | assert cfg == cfg2 70 | 71 | 72 | def test_reload_with_section_prefix(): # type: ignore 73 | with tempfile.NamedTemporaryFile() as f: 74 | ini = """ 75 | [coverage:run] 76 | branch = False 77 | parallel = False 78 | 79 | [other:section1] 80 | key1 = abc 81 | key2 = def 82 | key3 = 1.1 83 | 84 | [section2] 85 | key1 = 1 86 | key2 = 0 87 | """ 88 | 89 | f.file.write(ini.encode()) 90 | f.file.flush() 91 | cfg = config_from_ini( 92 | open(f.name, "rt"), 93 | section_prefix="coverage:", 94 | read_from_file=True, 95 | ) 96 | 97 | expected = config_from_dict( 98 | { 99 | "run.branch": "False", 100 | "run.parallel": "False", 101 | }, 102 | ) 103 | 104 | assert cfg == expected 105 | 106 | f.file.write(b"\n[coverage:report]\nignore_errors = False\n") 107 | f.file.flush() 108 | cfg = config_from_ini( 109 | open(f.name, "rt"), 110 | section_prefix="coverage:", 111 | read_from_file=True, 112 | ) 113 | cfg2 = config_from_dict( 114 | { 115 | "run.branch": "False", 116 | "run.parallel": "False", 117 | "report.ignore_errors": "False", 118 | }, 119 | ) 120 | 121 | assert cfg == cfg2 122 | -------------------------------------------------------------------------------- /tests/test_interpolation.py: -------------------------------------------------------------------------------- 1 | """Tests for interpolation.""" 2 | 3 | # ruff: 
noqa: D103,E501 4 | 5 | from config import InterpolateEnumType, config, config_from_dict 6 | from pytest import raises 7 | 8 | VALUES = {"var1": "This {var2}", "var2": "is a {var3}", "var3": "test"} 9 | FAILS = {"var1": "This will fail {var2}", "var2": "{var3}", "var3": "{var1}"} 10 | MULTI = {"var1": "This is a {var2} {var3}", "var2": "repeat {var3}", "var3": "test"} 11 | ARRAY = { 12 | "var1": ["This is a {var2} {var3}", "{var2}", "{var3}"], 13 | "var2": "repeat {var3}", 14 | "var3": "test", 15 | "var4": ["{var3}", ["{var2}"], 1], 16 | } 17 | SET1 = {"var1": "This {var2}", "var2": "is a {var3}"} 18 | SET2 = {"var3": "test"} 19 | VALUES_FMT = {"percentage": "{val:.3%}", "with_sign": "{val:+f}", "val": 1.23456} 20 | 21 | 22 | def test_no_interpolation(): # type: ignore 23 | cfg = config_from_dict(VALUES, lowercase_keys=True) 24 | 25 | assert cfg["var3"] == "test" 26 | assert cfg["var2"] == "is a {var3}" 27 | assert cfg["var1"] == "This {var2}" 28 | 29 | 30 | def test_interpolation(): # type: ignore 31 | cfg = config_from_dict(VALUES, lowercase_keys=True, interpolate=True) 32 | 33 | assert cfg["var3"] == "test" 34 | assert cfg["var2"] == "is a test" 35 | assert cfg["var1"] == "This is a test" 36 | assert cfg.var1 == "This is a test" 37 | 38 | cfg = config_from_dict( 39 | VALUES, 40 | lowercase_keys=True, 41 | interpolate=True, 42 | interpolate_type=InterpolateEnumType.DEEP, 43 | ) 44 | 45 | assert cfg["var3"] == "test" 46 | assert cfg["var2"] == "is a test" 47 | assert cfg["var1"] == "This is a test" 48 | assert cfg.var1 == "This is a test" 49 | 50 | cfg = config_from_dict( 51 | VALUES, 52 | lowercase_keys=True, 53 | interpolate=True, 54 | interpolate_type="unknown", # fail interpolation 55 | ) 56 | 57 | with raises(ValueError, match='Invalid interpolation method "unknown"'): 58 | assert cfg.var3 == "fail" 59 | 60 | 61 | def test_raise_on_interpolation_cycle(): # type: ignore 62 | cfg = config_from_dict(FAILS, lowercase_keys=True, interpolate=True) 63 | with raises(ValueError, match="Cycle detected"): 64 | assert cfg["var1"] 65 | 66 | 67 | def test_multiple_interpolation(): # type: ignore 68 | cfg = config_from_dict(MULTI, lowercase_keys=True, interpolate=True) 69 | 70 | assert cfg["var3"] == "test" 71 | assert cfg["var2"] == "repeat test" 72 | assert cfg["var1"] == "This is a repeat test test" 73 | assert cfg.var1 == "This is a repeat test test" 74 | 75 | 76 | def test_list(): # type: ignore 77 | cfg = config_from_dict(ARRAY, lowercase_keys=True, interpolate=True) 78 | 79 | assert cfg["var3"] == "test" 80 | assert cfg["var2"] == "repeat test" 81 | assert cfg["var1"] == ["This is a repeat test test", "repeat test", "test"] 82 | assert cfg["var4"] == ["test", ["repeat test"], 1] 83 | 84 | assert cfg.get_list("var1") == ["This is a repeat test test", "repeat test", "test"] 85 | 86 | 87 | def test_interpolation_on_set(): # type: ignore 88 | cfg = config(SET1, SET2, lowercase_keys=True, interpolate=True) 89 | 90 | assert cfg["var3"] == "test" 91 | assert cfg["var2"] == "is a test" 92 | assert cfg["var1"] == "This is a test" 93 | assert cfg.var1 == "This is a test" 94 | 95 | 96 | def test_no_interpolation_on_set(): # type: ignore 97 | cfg = config(SET1, SET2, lowercase_keys=True, interpolate=False) 98 | 99 | assert cfg["var3"] == "test" 100 | assert cfg["var2"] == "is a {var3}" 101 | assert cfg["var1"] == "This {var2}" 102 | assert cfg.var1 == "This {var2}" 103 | 104 | 105 | def test_interpolation_with_formatting(): # type: ignore 106 | cfg = config_from_dict(VALUES_FMT, lowercase_keys=True, 
interpolate=True) 107 | 108 | assert cfg["val"] == 1.23456 109 | assert cfg["with_sign"] == "+1.234560" 110 | assert cfg["percentage"] == "123.456%" 111 | assert cfg.percentage == "123.456%" 112 | 113 | cfg = config_from_dict( 114 | VALUES_FMT, 115 | lowercase_keys=True, 116 | interpolate=True, 117 | interpolate_type=InterpolateEnumType.DEEP, 118 | ) 119 | 120 | assert cfg["val"] == 1.23456 121 | assert cfg["with_sign"] == "+1.234560" 122 | assert cfg["percentage"] == "123.456%" 123 | assert cfg.percentage == "123.456%" 124 | 125 | 126 | def test_interpolation_with_literals(): # type: ignore 127 | # literals escaped 128 | values = { 129 | "something": "value_of_something", 130 | "interpolatable": "say {something} {{literal}}", 131 | "interpolatable2": "{interpolatable} {{another_literal}}", 132 | } 133 | 134 | cfg = config_from_dict(values, interpolate=True) 135 | 136 | assert cfg.something == "value_of_something" 137 | assert cfg.interpolatable == "say value_of_something {literal}" 138 | assert cfg.interpolatable2 == "say value_of_something {literal} {another_literal}" 139 | 140 | # passing extra values to interpolate 141 | values = { 142 | "something": "value_of_something", 143 | "interpolatable": "say {something} {literal}", 144 | "interpolatable2": "{interpolatable} {another_literal}", 145 | } 146 | 147 | cfg = config_from_dict( 148 | values, 149 | interpolate={"literal": "abc", "another_literal": "xyz"}, 150 | ) 151 | 152 | assert cfg.something == "value_of_something" 153 | assert cfg.interpolatable == "say value_of_something abc" 154 | assert cfg.interpolatable2 == "say value_of_something abc xyz" 155 | 156 | 157 | def test_interpolation_with_nested(): # type: ignore 158 | cfg = config_from_dict({"data.value": 15, "data.nested.value2": 16}) 159 | 160 | assert cfg.data == {"value": 15, "nested": {"value2": 16}} 161 | assert cfg.data.value == 15 162 | 163 | assert "{data.value}".format(**cfg) == "15" 164 | assert "{data.nested.value2}".format(**cfg) == "16" 165 | 166 | assert "{data.value}".format(data=cfg.data) == "15" 167 | assert "{data.nested.value2}".format(data=cfg.data) == "16" 168 | 169 | 170 | def test_interpolation_same_variable_1(): # type: ignore 171 | values_1 = {"var1": "something"} 172 | values_2 = {"var1": "{var1}/else", "var2": "{var1}"} 173 | 174 | cfg = config(values_1, values_2, lowercase_keys=True, interpolate=True) 175 | assert cfg.var1 == "something" 176 | 177 | cfg = config(values_2, values_1, lowercase_keys=True, interpolate=True) 178 | with raises(ValueError, match="Cycle detected"): 179 | assert cfg.var1 == "something/else" 180 | 181 | cfg = config( 182 | values_2, 183 | values_1, 184 | lowercase_keys=True, 185 | interpolate=True, 186 | interpolate_type=InterpolateEnumType.DEEP, 187 | ) 188 | assert cfg.var2 == "something/else" 189 | assert cfg.var1 == "something/else" 190 | 191 | 192 | def test_interpolation_same_variable_2(): # type: ignore 193 | values_1 = {"var1": "something", "var2": "test"} 194 | values_2 = {"var1": "{var1}/else", "var2": "{var1}", "var3": "{fail}"} 195 | 196 | cfg = config(values_1, values_2, lowercase_keys=True, interpolate=True) 197 | assert cfg.var1 == "something" 198 | assert cfg.var2 == "test" 199 | 200 | cfg = config(values_2, values_1, lowercase_keys=True, interpolate=True) 201 | with raises(ValueError, match="Cycle detected"): 202 | assert cfg.var1 == "something/else" 203 | with raises(ValueError, match="Cycle detected"): 204 | assert cfg.var2 == "something/else" 205 | 206 | cfg = config( 207 | values_2, 208 | values_1, 209 | 
lowercase_keys=True, 210 | interpolate=True, 211 | interpolate_type=InterpolateEnumType.DEEP, 212 | ) 213 | assert cfg.var2 == "something/else" 214 | assert cfg.var1 == "something/else" 215 | with raises(KeyError, match="fail"): 216 | assert cfg.var3 == "this should fail" 217 | 218 | 219 | def test_interpolation_same_variable_3(): # type: ignore 220 | values_1 = {"var1": "something", "var2": "test"} 221 | values_2 = {"var1": "{var2}/a", "var2": "{var1}/b"} 222 | 223 | cfg = config(values_1, values_2, lowercase_keys=True, interpolate=True) 224 | assert cfg.var1 == "something" 225 | assert cfg.var2 == "test" 226 | 227 | cfg = config(values_2, values_1, lowercase_keys=True, interpolate=True) 228 | with raises(ValueError, match="Cycle detected"): 229 | assert cfg.var1 == "something/else" 230 | with raises(ValueError, match="Cycle detected"): 231 | assert cfg.var2 == "something/else" 232 | 233 | cfg = config( 234 | values_2, 235 | values_1, 236 | lowercase_keys=True, 237 | interpolate=True, 238 | interpolate_type=InterpolateEnumType.DEEP, 239 | ) 240 | assert cfg.var2 == "test/a/b" # var2(2) --> var1(2) --> var2(1) 241 | assert cfg.var1 == "something/b/a" # var1(2) --> var2(2) --> var1(1) 242 | 243 | 244 | def test_interpolation_same_variable_4(): # type: ignore 245 | values_1 = {"var1": "{var2}", "var2": "test"} 246 | values_2 = {"var1": "{var1}/a", "var2": "{var1}/b"} 247 | 248 | cfg = config(values_1, values_2, lowercase_keys=True, interpolate=True) 249 | assert cfg.var1 == "test" 250 | assert cfg.var2 == "test" 251 | 252 | cfg = config(values_2, values_1, lowercase_keys=True, interpolate=True) 253 | with raises(ValueError, match="Cycle detected"): 254 | assert cfg.var1 == "something/else" 255 | with raises(ValueError, match="Cycle detected"): 256 | assert cfg.var2 == "something/else" 257 | 258 | cfg = config( 259 | values_2, 260 | values_1, 261 | lowercase_keys=True, 262 | interpolate=True, 263 | interpolate_type=InterpolateEnumType.DEEP, 264 | ) 265 | assert cfg.var2 == "test/a/b" # var2(2) --> var1(2) --> var1(1) --> var2(1) 266 | with raises(KeyError, match="var1"): 267 | assert cfg.var1 == "test/a" # var1(2) --> var1(1) --> var2(1) --> var1(fail) 268 | 269 | cfg = config( 270 | values_2, 271 | values_1, 272 | lowercase_keys=True, 273 | interpolate=True, 274 | interpolate_type=InterpolateEnumType.DEEP_NO_BACKTRACK, 275 | ) 276 | assert cfg.var2 == "test/a/b" # var2(2) --> var1(2) --> var1(1) --> var2(1) 277 | assert cfg.var1 == "test/a" # var1(2) --> var1(1) --> var2(1) 278 | -------------------------------------------------------------------------------- /tests/test_issues.py: -------------------------------------------------------------------------------- 1 | """Tests for github issues.""" 2 | 3 | # ruff: noqa: D103,E501 4 | 5 | import pytest 6 | from config import ( 7 | Configuration, 8 | ConfigurationSet, 9 | EnvConfiguration, 10 | config, 11 | config_from_dict, 12 | config_from_dotenv, 13 | ) 14 | 15 | 16 | def test_issue_49(): # type: ignore 17 | d = {"path": {"to": {"value-a": "A", "value-b": "B"}}} 18 | base_cfg = config_from_dict(d) 19 | 20 | d = {"path": {"to": {"value-a": "C"}}} 21 | 22 | cfg = config_from_dict(d) 23 | cfg_set = ConfigurationSet(cfg, base_cfg) 24 | 25 | path_config = cfg_set.get("path") 26 | 27 | assert path_config == {"to.value-a": "C", "to.value-b": "B"} 28 | 29 | 30 | def test_issue_63_a(): # type: ignore 31 | import os 32 | import tempfile 33 | 34 | ini = "[APP_NAME]\nfoo = bar\n\n[ANOTHER_APP]\nspam = egg" 35 | os.environ.update([("APP_NAME_EASTER", 
"egg")]) 36 | 37 | with tempfile.NamedTemporaryFile(suffix=".ini") as f: 38 | f.file.write(ini.encode()) 39 | f.file.flush() 40 | configs = config(f.name, "env", prefix="APP_NAME", separator="_") 41 | assert configs == { 42 | "APP_NAME": {"foo": "bar"}, 43 | "EASTER": "egg", 44 | "ANOTHER_APP": {"spam": "egg"}, 45 | } 46 | 47 | 48 | def test_issue_63_b(): # type: ignore 49 | import os 50 | import tempfile 51 | 52 | ini = "[APP_NAME]\nfoo = bar\n\n[ANOTHER_APP]\nspam = egg" 53 | os.environ.update([("PREFIX__APP_NAME__EASTER", "egg")]) 54 | 55 | with tempfile.NamedTemporaryFile(suffix=".ini") as f: 56 | f.file.write(ini.encode()) 57 | f.file.flush() 58 | configs = config(f.name, "env", prefix="PREFIX", separator="__") 59 | assert configs == { 60 | "APP_NAME": { 61 | "foo": "bar", 62 | "EASTER": "egg", 63 | }, 64 | "ANOTHER_APP": {"spam": "egg"}, 65 | } 66 | 67 | 68 | def test_issue_77(): # type: ignore 69 | env = EnvConfiguration(prefix="whatever") 70 | assert repr(env).startswith(" 0: 32 | subfolder += "/sub" 33 | os.makedirs(subfolder) 34 | lvl -= 1 35 | create_path_from_config( 36 | subfolder, 37 | config_from_dict(dic), 38 | remove_level=remove_level, 39 | ) 40 | if trailing_slash: 41 | folder += "/" 42 | cfg = config_from_path(folder, remove_level=remove_level) 43 | cfg2 = config_from_path(folder, remove_level=remove_level) 44 | walk = list(os.walk(folder)) 45 | 46 | # add an extra element to test reloads 47 | d = dic.copy() 48 | d["extra.value"] = 1 49 | create_path_from_config( 50 | subfolder, 51 | config_from_dict(d), 52 | remove_level=remove_level, 53 | ) 54 | cfg2.reload() 55 | 56 | return cfg, folder, walk, cfg2 57 | 58 | 59 | def test_load_path(): # type: ignore 60 | cfg, folder, walk, _ = _config_from_temp_path(DICT, remove_level=0) 61 | assert set(walk[0][2]) == set(DICT.keys()) 62 | assert cfg["a1.b1"].get_int("c1") == 1 63 | assert cfg["a1.b1"].as_dict() == {"c1": "1", "c2": "2", "c3": "3"} 64 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": "True", "c3": "1.1"} 65 | 66 | 67 | def test_load_path_with_trailing_slash(): # type: ignore 68 | cfg, folder, walk, _ = _config_from_temp_path( 69 | DICT, 70 | remove_level=0, 71 | trailing_slash=True, 72 | ) 73 | assert set(walk[0][2]) == set(DICT.keys()) 74 | assert cfg["a1.b1"].get_int("c1") == 1 75 | assert cfg["a1.b1"].as_dict() == {"c1": "1", "c2": "2", "c3": "3"} 76 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": "True", "c3": "1.1"} 77 | 78 | 79 | def test_equality(): # type: ignore 80 | cfg, folder, walk, _ = _config_from_temp_path(DICT, remove_level=0) 81 | assert cfg == config_from_dict({k: str(v) for k, v in DICT.items()}) 82 | 83 | 84 | def test_load_path_level(): # type: ignore 85 | cfg, folder, walk, _ = _config_from_temp_path(DICT, remove_level=1) 86 | assert walk[0][0] == folder 87 | assert walk[0][2] == [] 88 | assert set(walk[1][2]) == set(DICT.keys()) 89 | assert cfg["a1.b1"].get_int("c1") == 1 90 | assert cfg["a1.b1"].as_dict() == {"c1": "1", "c2": "2", "c3": "3"} 91 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": "True", "c3": "1.1"} 92 | 93 | 94 | def test_load_path_level_2(): # type: ignore 95 | cfg, folder, walk, _ = _config_from_temp_path(DICT, remove_level=2) 96 | assert walk[0][0] == folder 97 | assert walk[0][2] == [] 98 | assert walk[1][0] == folder + "/sub" 99 | assert walk[1][2] == [] 100 | assert set(walk[2][2]) == set(DICT.keys()) 101 | assert cfg["a1.b1"].get_int("c1") == 1 102 | assert cfg["a1.b1"].as_dict() == {"c1": "1", "c2": "2", "c3": "3"} 103 | assert cfg["a1.b2"].as_dict() == 
{"c1": "a", "c2": "True", "c3": "1.1"} 104 | 105 | 106 | def test_reload(): # type: ignore 107 | cfg, folder, walk, cfg2 = _config_from_temp_path(DICT, remove_level=0) 108 | assert cfg == config_from_dict({k: str(v) for k, v in DICT.items()}) 109 | cfg["extra.value"] = "1" 110 | assert cfg2 == cfg 111 | -------------------------------------------------------------------------------- /tests/test_python.py: -------------------------------------------------------------------------------- 1 | """Tests for paths.""" 2 | 3 | # ruff: noqa: D103 4 | 5 | import os 6 | import sys 7 | 8 | from config import config_from_dict, config_from_python 9 | 10 | DICT = { 11 | "a1.B1.c1": 1, 12 | "a1.b1.C2": 2, 13 | "A1.b1.c3": 3, 14 | "a1.b2.c1": "a", 15 | "a1.b2.c2": True, 16 | "a1.b2.c3": 1.1, 17 | "a2.b1.c1": "f", 18 | "a2.b1.c2": False, 19 | "a2.b1.c3": None, 20 | "a2.b2.c1": 10, 21 | "a2.b2.c2": "YWJjZGVmZ2g=", 22 | "a2.b2.c3": "abcdefgh", 23 | "sys.version": sys.hexversion, 24 | } 25 | 26 | 27 | def test_load_from_module(): # type: ignore 28 | from . import python_config 29 | 30 | cfg = config_from_python(python_config, prefix="CONFIG", lowercase_keys=True) 31 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": True, "c3": 1.1} 32 | assert cfg["sys.version"] == sys.hexversion 33 | 34 | 35 | def test_load_from_path(): # type: ignore 36 | path = os.path.join(os.path.dirname(__file__), "python_config.py") 37 | cfg = config_from_python(path, prefix="CONFIG", lowercase_keys=True) 38 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": True, "c3": 1.1} 39 | assert cfg["sys.version"] == sys.hexversion 40 | 41 | 42 | def test_load_from_module_string(): # type: ignore 43 | path = "tests.python_config" 44 | cfg = config_from_python(path, prefix="CONFIG", lowercase_keys=True) 45 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": True, "c3": 1.1} 46 | assert cfg["sys.version"] == sys.hexversion 47 | 48 | 49 | def test_equality(): # type: ignore 50 | from . import python_config 51 | 52 | cfg = config_from_python(python_config, prefix="CONFIG", lowercase_keys=True) 53 | assert cfg == config_from_dict(DICT, lowercase_keys=True) 54 | 55 | 56 | def test_equality_from_path(): # type: ignore 57 | path = os.path.join(os.path.dirname(__file__), "python_config.py") 58 | cfg = config_from_python(path, prefix="CONFIG", lowercase_keys=True) 59 | assert cfg == config_from_dict(DICT, lowercase_keys=True) 60 | 61 | 62 | def test_reload(): # type: ignore 63 | from . import python_config 64 | 65 | cfg = config_from_python(python_config, prefix="CONFIG", lowercase_keys=True) 66 | assert cfg == config_from_dict(DICT, lowercase_keys=True) 67 | 68 | python_config.CONFIG_A10_B10 = "a" 69 | cfg.reload() 70 | cfg2 = config_from_dict(DICT, lowercase_keys=True) 71 | cfg2["a10.b10"] = "a" 72 | assert cfg == cfg2 73 | 74 | 75 | def test_separator(): # type: ignore 76 | from . 
import python_config_2 77 | 78 | cfg = config_from_python( 79 | python_config_2, 80 | prefix="CONFIG", 81 | separator="__", 82 | lowercase_keys=True, 83 | ) 84 | assert cfg == config_from_dict(DICT, lowercase_keys=True) 85 | -------------------------------------------------------------------------------- /tests/test_toml.py: -------------------------------------------------------------------------------- 1 | """Tests for toml files.""" 2 | 3 | # ruff: noqa: D103,E501,SIM115 4 | 5 | import sys 6 | import tempfile 7 | from pathlib import Path 8 | 9 | import pytest 10 | from config import config, config_from_dict 11 | 12 | try: 13 | if sys.version_info < (3, 11): # pragma: no cover 14 | import tomli as toml 15 | else: # pragma: no cover 16 | import tomllib as toml 17 | 18 | from config import config_from_toml 19 | except ImportError: 20 | toml = None # type: ignore 21 | config_from_toml = None # type: ignore 22 | 23 | 24 | DICT = { 25 | "a1.b1.c1": 1, 26 | "a1.b1.c2": 2, 27 | "a1.b1.c3": 3, 28 | "a1.b2.c1": "a", 29 | "a1.b2.c2": True, 30 | "a1.b2.c3": 1.1, 31 | "a2.b1.c1": "f", 32 | "a2.b1.c2": False, 33 | "a2.b1.c3": "", 34 | "a2.b2.c1": 10, 35 | "a2.b2.c2": "YWJjZGVmZ2g=", 36 | "a2.b2.c3": "abcdefgh", 37 | } 38 | 39 | if toml: 40 | TOML = """ 41 | "a1.b1.c1" = 1 42 | "a1.b1.c2" = 2 43 | "a1.b1.c3" = 3 44 | "a1.b2.c1" = "a" 45 | "a1.b2.c2" = true 46 | "a1.b2.c3" = 1.1 47 | "a2.b1.c1" = "f" 48 | "a2.b1.c2" = false 49 | "a2.b1.c3" = "" 50 | "a2.b2.c1" = 10 51 | "a2.b2.c2" = "YWJjZGVmZ2g=" 52 | "a2.b2.c3" = "abcdefgh" 53 | """ 54 | 55 | TOML2 = """ 56 | [owner] 57 | name = "ABC" 58 | dob = 1979-05-27T07:32:00Z 59 | [database] 60 | server = "192.168.1.1" 61 | ports = [ 8001, 8001, 8002,] 62 | connection_max = 5000 63 | enabled = true 64 | [clients] 65 | data = [ [ "gamma", "delta",], [ 1, 2,],] 66 | hosts = [ "alpha", "omega",] 67 | [servers.alpha] 68 | ip = "10.0.0.1" 69 | dc = "eqdc10" 70 | [servers.beta] 71 | ip = "10.0.0.2" 72 | dc = "eqdc10" 73 | """ 74 | 75 | 76 | @pytest.mark.skipif("toml is None") 77 | def test_load_toml(): # type: ignore 78 | cfg = config_from_toml(TOML) 79 | assert cfg["a1.b1.c1"] == 1 80 | assert cfg["a1.b1"].as_dict() == {"c1": 1, "c2": 2, "c3": 3} 81 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": True, "c3": 1.1} 82 | 83 | 84 | @pytest.mark.skipif("toml is None") 85 | def test_load_toml_2(): # type: ignore 86 | cfg = config_from_toml(TOML2) 87 | assert cfg["owner.name"] == "ABC" 88 | assert cfg["servers"].as_dict() == { 89 | "alpha.dc": "eqdc10", 90 | "alpha.ip": "10.0.0.1", 91 | "beta.dc": "eqdc10", 92 | "beta.ip": "10.0.0.2", 93 | } 94 | assert cfg["clients.data"] == [["gamma", "delta"], [1, 2]] 95 | 96 | 97 | @pytest.mark.skipif("toml is None") 98 | def test_load_toml_file(): # type: ignore 99 | with tempfile.NamedTemporaryFile() as f: 100 | f.file.write(TOML.encode()) 101 | f.file.flush() 102 | cfg = config_from_toml(open(f.name, "rb"), read_from_file=True) 103 | assert cfg["a1.b1.c1"] == 1 104 | assert cfg["a1.b1"].as_dict() == {"c1": 1, "c2": 2, "c3": 3} 105 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": True, "c3": 1.1} 106 | assert cfg == config_from_dict(DICT) 107 | 108 | 109 | @pytest.mark.skipif("toml is None") 110 | def test_load_toml_filename(): # type: ignore 111 | with tempfile.NamedTemporaryFile() as f: 112 | f.file.write(TOML.encode()) 113 | f.file.flush() 114 | cfg = config_from_toml(f.name, read_from_file=True) 115 | assert cfg["a1.b1.c1"] == 1 116 | assert cfg["a1.b1"].as_dict() == {"c1": 1, "c2": 2, "c3": 3} 117 | assert 
cfg["a1.b2"].as_dict() == {"c1": "a", "c2": True, "c3": 1.1} 118 | assert cfg == config_from_dict(DICT) 119 | 120 | 121 | @pytest.mark.skipif("toml is None") 122 | def test_load_toml_filename_2(): # type: ignore 123 | with tempfile.NamedTemporaryFile() as f: 124 | f.file.write(TOML.encode()) 125 | f.file.flush() 126 | cfg = config_from_toml(Path(f.name), read_from_file=True) 127 | assert cfg["a1.b1.c1"] == 1 128 | assert cfg["a1.b1"].as_dict() == {"c1": 1, "c2": 2, "c3": 3} 129 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": True, "c3": 1.1} 130 | assert cfg == config_from_dict(DICT) 131 | 132 | 133 | @pytest.mark.skipif("toml is None") 134 | def test_load_toml_filename_3(): # type: ignore 135 | with tempfile.NamedTemporaryFile(suffix=".toml") as f: 136 | f.file.write(TOML.encode()) 137 | f.file.flush() 138 | cfg = config(f.name) 139 | assert cfg["a1.b1.c1"] == 1 140 | assert cfg["a1.b1"].as_dict() == {"c1": 1, "c2": 2, "c3": 3} 141 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": True, "c3": 1.1} 142 | assert cfg == config_from_dict(DICT) 143 | 144 | 145 | @pytest.mark.skipif("toml is None") 146 | def test_equality(): # type: ignore 147 | cfg = config_from_toml(TOML) 148 | assert cfg == config_from_dict(DICT) 149 | 150 | 151 | @pytest.mark.skipif("toml is None") 152 | def test_reload_toml(): # type: ignore 153 | with tempfile.NamedTemporaryFile() as f: 154 | f.file.write(TOML.encode()) 155 | f.file.flush() 156 | cfg = config_from_toml(f.name, read_from_file=True) 157 | assert cfg == config_from_dict(DICT) 158 | 159 | f.file.seek(0) 160 | f.file.truncate(0) 161 | f.file.write(b'[owner]\nname = "ABC"\n') 162 | f.file.flush() 163 | cfg.reload() 164 | assert cfg == config_from_dict({"owner.name": "ABC"}) 165 | 166 | 167 | @pytest.mark.skipif("toml is None") 168 | def test_reload_toml_with_section_prefix(): # type: ignore 169 | with tempfile.NamedTemporaryFile() as f: 170 | toml_input = """ 171 | [tool.coverage.run] 172 | branch = false 173 | parallel = false 174 | [database] 175 | server = "192.168.1.1" 176 | ports = [ 8001, 8001, 8002,] 177 | """ 178 | 179 | f.file.write(toml_input.encode()) 180 | f.file.flush() 181 | 182 | cfg = config_from_toml( 183 | f.name, 184 | section_prefix="tool.coverage.", 185 | read_from_file=True, 186 | ) 187 | expected = config_from_dict( 188 | { 189 | "run.branch": False, 190 | "run.parallel": False, 191 | }, 192 | ) 193 | 194 | assert cfg == expected 195 | 196 | f.file.seek(0) 197 | f.file.truncate(0) 198 | f.file.write(b"[tool.coverage.report]\nignore_errors = false\n") 199 | f.file.flush() 200 | cfg.reload() 201 | expected = config_from_dict( 202 | { 203 | "report.ignore_errors": False, 204 | }, 205 | ) 206 | assert cfg == expected 207 | -------------------------------------------------------------------------------- /tests/test_validation.py: -------------------------------------------------------------------------------- 1 | """Validation tests.""" 2 | 3 | # ruff: noqa: D103 4 | 5 | import pytest 6 | from config import ( 7 | config_from_dict, 8 | ) 9 | 10 | try: 11 | import jsonschema 12 | except ImportError: 13 | jsonschema = None 14 | 15 | 16 | @pytest.mark.skipif("jsonschema is None") 17 | def test_validation_ok(): # type: ignore 18 | d = {"items": [1, 3]} 19 | cfg = config_from_dict(d) 20 | 21 | schema = { 22 | "type": "object", 23 | "properties": { 24 | "items": { 25 | "type": "array", 26 | "items": {"enum": [1, 2, 3]}, 27 | "maxItems": 2, 28 | }, 29 | }, 30 | } 31 | 32 | assert cfg.validate(schema) 33 | 34 | 35 | 
@pytest.mark.skipif("jsonschema is None") 36 | def test_validation_fail(): # type: ignore 37 | from jsonschema.exceptions import ValidationError 38 | 39 | schema = { 40 | "type": "object", 41 | "properties": { 42 | "items": { 43 | "type": "array", 44 | "items": {"enum": [1, 2, 3]}, 45 | "maxItems": 2, 46 | }, 47 | }, 48 | } 49 | 50 | with pytest.raises(ValidationError) as err: 51 | d = {"items": [1, 4]} 52 | cfg = config_from_dict(d) 53 | assert not cfg.validate(schema) 54 | cfg.validate(schema, raise_on_error=True) 55 | assert "4 is not one of [1, 2, 3]" in str(err) 56 | 57 | with pytest.raises(ValidationError) as err: 58 | d = {"items": [1, 2, 3]} 59 | cfg = config_from_dict(d) 60 | assert not cfg.validate(schema) 61 | cfg.validate(schema, raise_on_error=True) 62 | assert "[1, 2, 3] is too long" in str(err) 63 | 64 | 65 | @pytest.mark.skipif("jsonschema is None") 66 | def test_validation_format(): # type: ignore 67 | from jsonschema import Draft202012Validator 68 | from jsonschema.exceptions import ValidationError 69 | 70 | schema = { 71 | "type": "object", 72 | "properties": { 73 | "ip": {"format": "ipv4"}, 74 | }, 75 | } 76 | 77 | cfg = config_from_dict({"ip": "10.0.0.1"}) 78 | assert cfg.validate(schema, format_checker=Draft202012Validator.FORMAT_CHECKER) 79 | 80 | # this passes since we didn't specify the format checker 81 | cfg = config_from_dict({"ip": "10"}) 82 | assert cfg.validate(schema) 83 | 84 | # fails with the format checker 85 | with pytest.raises(ValidationError) as err: 86 | cfg = config_from_dict({"ip": "10"}) 87 | cfg.validate( 88 | schema, 89 | raise_on_error=True, 90 | format_checker=Draft202012Validator.FORMAT_CHECKER, 91 | ) 92 | assert "'10' is not a 'ipv4'" in str(err) 93 | 94 | 95 | @pytest.mark.skipif("jsonschema is None") 96 | def test_validation_nested(): # type: ignore 97 | d = {"item": {"sub1": 1, "sub2": "abc"}} 98 | cfg = config_from_dict(d) 99 | 100 | schema = { 101 | "type": "object", 102 | "properties": { 103 | "item.sub1": {"type": "number"}, 104 | "item.sub2": {"type": "string"}, 105 | }, 106 | "required": ["item.sub1", "item.sub2"], 107 | } 108 | assert cfg.validate(schema) 109 | 110 | schema = { 111 | "type": "object", 112 | "properties": { 113 | "item": { 114 | "type": "object", 115 | "properties": { 116 | "sub1": {"type": "number"}, 117 | "sub2": {"type": "string"}, 118 | }, 119 | "required": ["sub1", "sub2"], 120 | }, 121 | }, 122 | "required": ["item"], 123 | } 124 | assert cfg.validate(schema, nested=True) 125 | -------------------------------------------------------------------------------- /tests/test_yaml.py: -------------------------------------------------------------------------------- 1 | """Tests for yaml files.""" 2 | 3 | # ruff: noqa: D103,E501,SIM115 4 | 5 | import tempfile 6 | from pathlib import Path 7 | 8 | import pytest 9 | from config import config_from_dict 10 | from pytest import raises 11 | 12 | try: 13 | import yaml 14 | from config import config_from_yaml 15 | except ImportError: 16 | yaml = None 17 | config_from_yaml = None # type: ignore 18 | 19 | 20 | DICT = { 21 | "a1.b1.c1": 1, 22 | "a1.b1.c2": 2, 23 | "a1.b1.c3": 3, 24 | "a1.b2.c1": "a", 25 | "a1.b2.c2": True, 26 | "a1.b2.c3": 1.1, 27 | "a2.b1.c1": "f", 28 | "a2.b1.c2": False, 29 | "a2.b1.c3": None, 30 | "a2.b2.c1": 10, 31 | "a2.b2.c2": "YWJjZGVmZ2g=", 32 | "a2.b2.c3": "abcdefgh", 33 | } 34 | 35 | if yaml: 36 | YAML = yaml.dump(DICT) 37 | 38 | YAML2 = """ 39 | # Employee records 40 | martin: 41 | name: Martin D'vloper 42 | job: Developer 43 | skills: 44 | - python 45 
| - perl 46 | - pascal 47 | tabitha: 48 | name: Tabitha Bitumen 49 | job: Developer 50 | skills: 51 | - lisp 52 | - fortran 53 | - erlang 54 | """ 55 | 56 | YAML3 = """ 57 | - test: 58 | a: b 59 | c: d 60 | - test2: 61 | a: b 62 | c: d 63 | """ 64 | 65 | 66 | @pytest.mark.skipif("yaml is None") 67 | def test_load_yaml(): # type: ignore 68 | cfg = config_from_yaml(YAML) 69 | assert cfg["a1.b1.c1"] == 1 70 | assert cfg["a1.b1"].as_dict() == {"c1": 1, "c2": 2, "c3": 3} 71 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": True, "c3": 1.1} 72 | 73 | 74 | @pytest.mark.skipif("yaml is None") 75 | def test_load_yaml_2(): # type: ignore 76 | cfg = config_from_yaml(YAML2) 77 | assert cfg["martin.name"] == "Martin D'vloper" 78 | assert cfg["martin"].as_dict() == { 79 | "job": "Developer", 80 | "name": "Martin D'vloper", 81 | "skills": ["python", "perl", "pascal"], 82 | } 83 | assert cfg["martin.skills"] == ["python", "perl", "pascal"] 84 | 85 | 86 | @pytest.mark.skipif("yaml is None") 87 | def test_fails(): # type: ignore 88 | with raises(ValueError): 89 | config_from_yaml(YAML3) 90 | 91 | 92 | @pytest.mark.skipif("yaml is None") 93 | def test_load_yaml_file(): # type: ignore 94 | with tempfile.NamedTemporaryFile() as f: 95 | f.file.write(YAML.encode()) 96 | f.file.flush() 97 | cfg = config_from_yaml(open(f.name, "rt"), read_from_file=True) 98 | assert cfg["a1.b1.c1"] == 1 99 | assert cfg["a1.b1"].as_dict() == {"c1": 1, "c2": 2, "c3": 3} 100 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": True, "c3": 1.1} 101 | assert cfg == config_from_dict(DICT) 102 | 103 | 104 | @pytest.mark.skipif("yaml is None") 105 | def test_load_yaml_filename(): # type: ignore 106 | with tempfile.NamedTemporaryFile() as f: 107 | f.file.write(YAML.encode()) 108 | f.file.flush() 109 | cfg = config_from_yaml(f.name, read_from_file=True) 110 | assert cfg["a1.b1.c1"] == 1 111 | assert cfg["a1.b1"].as_dict() == {"c1": 1, "c2": 2, "c3": 3} 112 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": True, "c3": 1.1} 113 | assert cfg == config_from_dict(DICT) 114 | 115 | 116 | @pytest.mark.skipif("yaml is None") 117 | def test_load_yaml_filename_2(): # type: ignore 118 | with tempfile.NamedTemporaryFile() as f: 119 | f.file.write(YAML.encode()) 120 | f.file.flush() 121 | cfg = config_from_yaml(Path(f.name), read_from_file=True) 122 | assert cfg["a1.b1.c1"] == 1 123 | assert cfg["a1.b1"].as_dict() == {"c1": 1, "c2": 2, "c3": 3} 124 | assert cfg["a1.b2"].as_dict() == {"c1": "a", "c2": True, "c3": 1.1} 125 | assert cfg == config_from_dict(DICT) 126 | 127 | 128 | @pytest.mark.skipif("yaml is None") 129 | def test_equality(): # type: ignore 130 | cfg = config_from_yaml(YAML) 131 | assert cfg == config_from_dict(DICT) 132 | 133 | 134 | @pytest.mark.skipif("yaml is None") 135 | def test_reload_yaml(): # type: ignore 136 | with tempfile.NamedTemporaryFile() as f: 137 | f.file.write(YAML.encode()) 138 | f.file.flush() 139 | cfg = config_from_yaml(f.name, read_from_file=True) 140 | assert cfg == config_from_dict(DICT) 141 | 142 | f.file.seek(0) 143 | f.file.truncate(0) 144 | f.file.write(YAML2.encode()) 145 | f.file.flush() 146 | cfg.reload() 147 | assert cfg == config_from_yaml(YAML2) 148 | --------------------------------------------------------------------------------
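
The test modules above double as usage documentation for the library's public API. A minimal sketch of the interpolation behaviour exercised in tests/test_interpolation.py, assuming only that the "config" package these tests import (python-configuration) is installed:

    from config import config_from_dict

    values = {"var1": "This {var2}", "var2": "is a {var3}", "var3": "test"}

    # Interpolation is off by default, so placeholders come back verbatim.
    cfg = config_from_dict(values, lowercase_keys=True)
    assert cfg["var1"] == "This {var2}"

    # With interpolate=True the placeholders are resolved recursively,
    # and values are reachable by item or attribute access.
    cfg = config_from_dict(values, lowercase_keys=True, interpolate=True)
    assert cfg["var1"] == "This is a test"
    assert cfg.var1 == "This is a test"

    # Doubled braces survive interpolation as literal braces.
    cfg = config_from_dict(
        {"something": "abc", "msg": "say {something} {{literal}}"},
        interpolate=True,
    )
    assert cfg.msg == "say abc {literal}"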
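
Layering several sources with config() builds a ConfigurationSet in which earlier arguments take precedence, and InterpolateEnumType.DEEP lets a value reference the entry it overrides in a lower layer, as in test_interpolation_same_variable_1. A condensed sketch under the same assumption about the installed package:

    from config import InterpolateEnumType, config

    values_1 = {"var1": "something"}
    values_2 = {"var1": "{var1}/else", "var2": "{var1}"}

    # With plain interpolate=True, accessing var1 in this layering raises
    # ValueError("Cycle detected ..."), because "{var1}" in the top layer
    # refers to itself; DEEP resolves it against the layer below.
    cfg = config(
        values_2,
        values_1,
        lowercase_keys=True,
        interpolate=True,
        interpolate_type=InterpolateEnumType.DEEP,
    )
    assert cfg.var1 == "something/else"
    assert cfg.var2 == "something/else"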
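
Finally, tests/test_validation.py shows the optional jsonschema integration: validate() returns a boolean, raise_on_error=True raises jsonschema's ValidationError instead, and nested=True exposes the dotted keys to the schema as a nested object. A sketch that additionally assumes jsonschema is installed:

    from config import config_from_dict

    cfg = config_from_dict({"item": {"sub1": 1, "sub2": "abc"}})

    schema = {
        "type": "object",
        "properties": {
            "item": {
                "type": "object",
                "properties": {
                    "sub1": {"type": "number"},
                    "sub2": {"type": "string"},
                },
                "required": ["sub1", "sub2"],
            },
        },
        "required": ["item"],
    }

    # Returns True here; an invalid document would return False, or raise
    # with cfg.validate(schema, nested=True, raise_on_error=True).
    assert cfg.validate(schema, nested=True)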