├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ ├── docs.yml │ └── test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── Makefile ├── README.md ├── argdantic ├── __init__.py ├── convert.py ├── core.py ├── fields.py ├── parsing │ ├── __init__.py │ ├── actions.py │ └── arguments.py ├── py.typed ├── registry.py ├── sources │ ├── __init__.py │ ├── base.py │ ├── dynamic.py │ ├── json.py │ ├── toml.py │ └── yaml.py ├── stores │ ├── __init__.py │ ├── base.py │ ├── json.py │ ├── toml.py │ └── yaml.py ├── testing.py ├── utils.py └── version.py ├── docs ├── guide │ ├── composition.md │ ├── customization.md │ ├── intro.md │ ├── sources.md │ └── types.md └── index.md ├── examples ├── basic │ ├── empty.py │ ├── empty_cmd.py │ └── simple.py ├── composition │ ├── nested_models.py │ ├── nested_parsers.py │ └── singleton.py ├── customization │ ├── aliases.py │ ├── custom_name.py │ ├── custom_name_grouped.py │ ├── custom_name_multi.py │ ├── default_values.py │ ├── default_values_none.py │ ├── default_with_fields.py │ ├── description.py │ ├── description_docs.py │ └── description_fields.py ├── sources │ ├── dynamic.py │ ├── dynamic_custom.py │ ├── resources │ │ ├── dataset.yml │ │ └── optim.yml │ ├── settings.yaml │ └── single_cmd.py └── typing │ ├── choices.py │ ├── containers.py │ └── primitive.py ├── mkdocs.yml ├── pyproject.toml └── tests ├── __init__.py ├── conftest.py ├── test_basic.py ├── test_conversion.py ├── test_sources ├── __init__.py ├── test_json.py ├── test_misc.py ├── test_toml.py └── test_yaml.py ├── test_stores ├── __init__.py ├── test_json.py ├── test_toml.py └── test_yaml.py ├── test_testing.py ├── test_types ├── __init__.py ├── test_choices.py ├── test_forwardrefs.py ├── test_models.py ├── test_primitives.py └── test_sequences.py └── test_utils.py /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Desktop (please complete the following information):** 27 | - OS: [e.g. iOS] 28 | - Browser [e.g. chrome, safari] 29 | - Version [e.g. 22] 30 | 31 | **Smartphone (please complete the following information):** 32 | - Device: [e.g. iPhone6] 33 | - OS: [e.g. iOS8.1] 34 | - Browser [e.g. stock browser, safari] 35 | - Version [e.g. 22] 36 | 37 | **Additional context** 38 | Add any other context about the problem here. 39 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 
12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: docs 2 | on: 3 | push: 4 | branches: 5 | - main 6 | permissions: 7 | contents: write 8 | jobs: 9 | deploy: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v3 13 | - uses: actions/setup-python@v4 14 | with: 15 | python-version: 3.x 16 | - name: Install Flit 17 | run: pip install flit 18 | - name: Install Dependencies 19 | run: python -m flit install --symlink --extras docs 20 | - name: Deploy docs 21 | run: mkdocs gh-deploy --force 22 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: test 5 | 6 | on: 7 | push: 8 | branches: [main] 9 | pull_request: 10 | branches: [main] 11 | 12 | jobs: 13 | build: 14 | runs-on: ${{ matrix.os }} 15 | strategy: 16 | fail-fast: false 17 | matrix: 18 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] 19 | os: [ubuntu-latest, macOS-latest, windows-latest] 20 | 21 | steps: 22 | - uses: actions/checkout@v4 23 | - name: Set up Python ${{ matrix.python-version }} 24 | uses: actions/setup-python@v5 25 | with: 26 | python-version: ${{ matrix.python-version }} 27 | cache: pip 28 | - name: Install Flit 29 | run: pip install flit 30 | - name: Install Dependencies 31 | run: pip install -e ".[all,dev,test]" 32 | - name: Lint code 33 | run: | 34 | ruff check . 35 | ruff format --check . 36 | - name: Test code 37 | run: | 38 | pytest --cov=argdantic --cov-report=xml 39 | - name: Upload coverage 40 | uses: codecov/codecov-action@v2 41 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # IDEs 2 | .vscode 3 | .idea 4 | .iml 5 | 6 | # Byte-compiled / optimized / DLL files 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | wheels/ 28 | pip-wheel-metadata/ 29 | share/python-wheels/ 30 | *.egg-info/ 31 | .installed.cfg 32 | *.egg 33 | .pypirc 34 | MANIFEST 35 | 36 | # PyInstaller 37 | # Usually these files are written by a python script from a template 38 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
39 | *.manifest 40 | *.spec 41 | 42 | # Installer logs 43 | pip-log.txt 44 | pip-delete-this-directory.txt 45 | 46 | # Unit test / coverage reports 47 | htmlcov/ 48 | .tox/ 49 | .nox/ 50 | .coverage 51 | .coverage.* 52 | .cache 53 | nosetests.xml 54 | coverage.xml 55 | *.cover 56 | *.py,cover 57 | .hypothesis/ 58 | .pytest_cache/ 59 | 60 | # Translations 61 | *.mo 62 | *.pot 63 | 64 | # Django stuff: 65 | *.log 66 | local_settings.py 67 | db.sqlite3 68 | db.sqlite3-journal 69 | 70 | # Flask stuff: 71 | instance/ 72 | .webassets-cache 73 | 74 | # Scrapy stuff: 75 | .scrapy 76 | 77 | # Sphinx documentation 78 | docs/_build/ 79 | 80 | # PyBuilder 81 | target/ 82 | 83 | # Jupyter Notebook 84 | .ipynb_checkpoints 85 | 86 | # IPython 87 | profile_default/ 88 | ipython_config.py 89 | 90 | # pyenv 91 | .python-version 92 | 93 | # pipenv 94 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 95 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 96 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 97 | # install all needed dependencies. 98 | #Pipfile.lock 99 | 100 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 101 | __pypackages__/ 102 | 103 | # Celery stuff 104 | celerybeat-schedule 105 | celerybeat.pid 106 | 107 | # SageMath parsed files 108 | *.sage.py 109 | 110 | # Environments 111 | .env 112 | .venv 113 | .venv-*/ 114 | env/ 115 | venv/ 116 | ENV/ 117 | env.bak/ 118 | venv.bak/ 119 | 120 | # Spyder project settings 121 | .spyderproject 122 | .spyproject 123 | 124 | # Rope project settings 125 | .ropeproject 126 | 127 | # mkdocs documentation 128 | /site 129 | 130 | # mypy 131 | .mypy_cache/ 132 | .dmypy.json 133 | dmypy.json 134 | 135 | # Pyre type checker 136 | .pyre/ 137 | 138 | # Misc 139 | *.txt 140 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | repos: 4 | - repo: https://github.com/pre-commit/pre-commit-hooks 5 | rev: v4.3.0 6 | hooks: 7 | - id: check-added-large-files 8 | - id: check-toml 9 | - id: end-of-file-fixer 10 | - id: trailing-whitespace 11 | - repo: https://github.com/astral-sh/ruff-pre-commit 12 | # Ruff version. 13 | rev: v0.9.0 14 | hooks: 15 | # Run the linter. 16 | - id: ruff 17 | # Run the formatter. 
18 | - id: ruff-format 19 | - repo: https://github.com/pre-commit/mirrors-mypy 20 | rev: v1.11.1 21 | hooks: 22 | - id: mypy 23 | additional_dependencies: ["types-toml", "types-PyYAML"] 24 | exclude: "tests|examples" 25 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Edoardo Arnaudo 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .ONESHELL: 2 | PY_ENV=.venv 3 | PY_BIN=$(shell python -c "print('$(PY_ENV)/bin') if __import__('pathlib').Path('$(PY_ENV)/bin/pip').exists() else print('')") 4 | 5 | .PHONY: help 6 | help: ## Show the help. 7 | @echo "Usage: make " 8 | @echo "" 9 | @echo "Targets:" 10 | @fgrep "##" Makefile | fgrep -v fgrep 11 | 12 | .PHONY: check-venv 13 | check-venv: ## Check if the virtualenv exists. 14 | @if [ "$(PY_BIN)" = "" ]; then echo "No virtualenv detected, create one using 'make virtualenv'"; exit 1; else echo "Found venv $(PY_BIN)"; fi 15 | 16 | .PHONY: fmt 17 | fmt: check-venv 18 | $(PY_BIN)/ruff format . 19 | 20 | .PHONY: lint 21 | lint: check-venv 22 | $(PY_BIN)/ruff check . 23 | $(PY_BIN)/ruff format --check . 24 | @if [ -x "$(PY_BIN)/mypy" ]; then $(PY_BIN)/mypy argdantic/; else echo "mypy not installed, skipping"; fi 25 | 26 | .PHONY: test 27 | test: check-venv lint 28 | $(PY_BIN)/pytest --cov=argdantic --cov-report=xml -o console_output_style=progress 29 | 30 | .PHONY: publish 31 | publish: check-venv 32 | $(PY_BIN)/flit publish --pypirc $(CFG) 33 | 34 | .PHONY: clean 35 | clean: ## Clean unused files. 36 | @find ./ -name '*.pyc' -exec rm -f {} \; 37 | @find ./ -name '__pycache__' -exec rm -rf {} \; 38 | @find ./ -name 'Thumbs.db' -exec rm -f {} \; 39 | @find ./ -name '*~' -exec rm -f {} \; 40 | @rm -rf .cache 41 | @rm -rf .pytest_cache 42 | @rm -rf .mypy_cache 43 | @rm -rf build 44 | @rm -rf dist 45 | @rm -rf *.egg-info 46 | @rm -rf htmlcov 47 | @rm -rf .tox/ 48 | @rm -rf docs/_build 49 | 50 | .PHONY: release 51 | release: ## Create a new tag for release. 52 | @echo "WARNING: This operation will create s version tag and push to github" 53 | @read -p "Version? 
(provide the next x.y.z semver) : " TAG 54 | @VER_FILE=$$(find argdantic -maxdepth 2 -type f -name 'version.py' | head -n 1) 55 | @echo "Updating version file :\n $${VER_FILE}" 56 | @echo __version__ = \""$${TAG}"\" > $${VER_FILE} 57 | @git add . 58 | @git commit -m "🔖 Release version v$${TAG}" 59 | @echo "creating git tag : v$${TAG}" 60 | @git tag v$${TAG} 61 | @git push -u origin HEAD --tags 62 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # argdantic 2 | Typed command line interfaces with [`argparse`](https://docs.python.org/3/library/argparse.html) and [`pydantic`](https://github.com/pydantic/pydantic). 3 | 4 | [![test passing](https://img.shields.io/github/actions/workflow/status/edornd/argdantic/test.yml?branch=main)](https://github.com/edornd/argdantic) 5 | [![coverage](https://img.shields.io/codecov/c/gh/edornd/argdantic)](https://codecov.io/gh/edornd/argdantic) 6 | [![pypi version](https://img.shields.io/pypi/v/argdantic)](https://pypi.org/project/argdantic/) 7 | [![python versions](https://img.shields.io/pypi/pyversions/argdantic)](https://github.com/edornd/argdantic) 8 | 9 | [![license](https://img.shields.io/github/license/edornd/argdantic)](https://github.com/edornd/argdantic) 10 | [![documentation](https://img.shields.io/badge/documentation-%F0%9F%93%9A-blue)](https://edornd.github.io/argdantic/) 11 | --- 12 | 13 | ## Features 14 | 15 | `argdantic` provides a thin boilerplate layer for a modern CLI experience, including: 16 | - **Typed arguments:** arguments require full typing by default, enforcing clarity and helping your editor provide better support (linting, hinting). 17 | - **Nested models:** exploit `pydantic` models to scale from simple primitives to complex nested configurations with little effort. 18 | - **Nested commands:** combine commands into nested hierarchies to build complex interfaces. 19 | - **Validation by default:** thanks to `pydantic`, field validation is provided by default, with the desired complexity. 20 | - **Multiple sources:** arguments can be provided from multiple sources, including environment variables, JSON, TOML and YAML files. 21 | 22 | ## Quickstart 23 | 24 | ### Installation 25 | Installing `argdantic` can be done from source, or simply using `pip`. 26 | The only required dependency is, of course, *pydantic*, while the remaining ones can be selected depending on your needs: 27 | 28 | ```console 29 | recommended choice: install everything 30 | this includes orjson, pyyaml, tomli, python-dotenv 31 | user@pc:~$ pip install argdantic[all] 32 | 33 | env, json, toml or yaml dependencies 34 | user@pc:~$ pip install argdantic[env|json|toml|yaml] 35 | 36 | minimum requirement, only pydantic included 37 | user@pc:~$ pip install argdantic 38 | ``` 39 | 40 | ### A Simple Example 41 | 42 | Creating a CLI with `argdantic` can be as simple as: 43 | 44 | ```python 45 | from argdantic import ArgParser 46 | 47 | # 1. create a CLI instance 48 | parser = ArgParser() 49 | 50 | 51 | # 2. decorate the function to be called 52 | @parser.command() 53 | def buy(name: str, quantity: int, price: float): 54 | print(f"Bought {quantity} {name} at ${price:.2f}.") 55 | 56 | # 3.
Use your CLI by simply calling it 57 | if __name__ == "__main__": 58 | parser() 59 | ``` 60 | 61 | Then, in a terminal, the `help` command can provide the usual information: 62 | 63 | ```console 64 | $ python cli.py --help 65 | > usage: buy [-h] --name TEXT --quantity INT --price FLOAT 66 | > 67 | > optional arguments: 68 | > -h, --help show this help message and exit 69 | > --name TEXT 70 | > --quantity INT 71 | > --price FLOAT 72 | ``` 73 | 74 | This gives us the required arguments for the execution: 75 | 76 | ```console 77 | $ python cli.py --name apples --quantity 10 --price 3.4 78 | > Bought 10 apples at $3.40. 79 | ``` 80 | 81 | ### Using Models 82 | 83 | Plain arguments and `pydantic` models can be mixed together: 84 | 85 | ```python 86 | from argdantic import ArgParser 87 | from pydantic import BaseModel 88 | 89 | parser = ArgParser() 90 | 91 | 92 | class Item(BaseModel): 93 | name: str 94 | price: float 95 | 96 | 97 | @parser.command() 98 | def buy(item: Item, quantity: int): 99 | print(f"Bought {quantity} X {item.name} at ${item.price:.2f}.") 100 | 101 | if __name__ == "__main__": 102 | parser() 103 | ``` 104 | 105 | This will produce the following help: 106 | 107 | ```console 108 | usage: cli.py [-h] --item.name TEXT --item.price FLOAT --quantity INT 109 | 110 | optional arguments: 111 | -h, --help show this help message and exit 112 | --item.name TEXT 113 | --item.price FLOAT 114 | --quantity INT 115 | ``` 116 | 117 | ### Arguments From Different Sources 118 | 119 | `argdantic` supports several inputs: 120 | - **`.env` files**, environment variables, and secrets thanks to *pydantic*. 121 | - **JSON files**, using either the standard `json` library, or `orjson` if available. 122 | - **YAML files**, using the `pyyaml` library. 123 | - **TOML files**, using the lightweight `tomli` library. 124 | 125 | Sources can be imported and added to each command independently, as such: 126 | 127 | ```python 128 | from argdantic import ArgParser 129 | from argdantic.sources import EnvSettingsSource, JsonSettingsSource 130 | 131 | parser = ArgParser() 132 | 133 | 134 | @parser.command( 135 | sources=[ 136 | EnvSettingsSource(env_file=".env", env_file_encoding="utf-8"), 137 | JsonSettingsSource(path="settings.json"), 138 | ] 139 | ) 140 | def sell(item: str, quantity: int, value: float): 141 | print(f"Selling: {item} x {quantity}, {value:.2f}$") 142 | 143 | 144 | if __name__ == "__main__": 145 | parser() 146 | ``` 147 | 148 | This is just a brief introduction to the library, more examples and details can be found in the [documentation](https://edornd.github.io/argdantic/). 149 | 150 | ## Contributing 151 | 152 | Contributions are welcome! You can open a new issue to report bugs, or suggest new features. If you're brave enough, pull requests are also welcome. 
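### Composing Parsers

The "nested commands" feature listed above relies on `ArgParser.add_parser`, defined in `argdantic/core.py`. The following is a minimal, illustrative sketch of that mechanism; the `db` group and its `init` and `drop` commands are made-up names, not part of the library:

```python
from argdantic import ArgParser

cli = ArgParser()
db = ArgParser(name="db", description="Database utilities")


# commands registered on the "db" parser become subcommands of the "db" group
@db.command()
def init(path: str = "app.db"):
    print(f"Initializing {path}")


@db.command()
def drop(path: str = "app.db"):
    print(f"Dropping {path}")


# attach the "db" group to the root parser
cli.add_parser(db)

if __name__ == "__main__":
    cli()
```

Invoking it should then look roughly like `python cli.py db init --path app.db`.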
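### Singleton Commands

When a callback should receive the validated configuration as a single object rather than as destructured keyword arguments, the `singleton` flag of `@parser.command()` (see `argdantic/core.py`) can be used. This sketch is illustrative; the `Config` model and its fields are arbitrary:

```python
from argdantic import ArgParser
from pydantic import BaseModel

parser = ArgParser()


class Config(BaseModel):
    name: str
    epochs: int = 10


# with singleton=True, the command must declare exactly one pydantic model parameter
@parser.command(singleton=True)
def train(config: Config):
    print(f"Training {config.name} for {config.epochs} epochs")


if __name__ == "__main__":
    parser()
```

The CLI options are still generated from the model fields (here `--name` and `--epochs`), but the callback receives the whole validated model.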
153 | -------------------------------------------------------------------------------- /argdantic/__init__.py: -------------------------------------------------------------------------------- 1 | """Typed command line interfaces with argparse and pydantic""" 2 | 3 | from argdantic.core import ArgParser 4 | from argdantic.fields import ArgField 5 | from argdantic.version import __version__ 6 | 7 | __all__ = [ 8 | "__version__", 9 | "ArgParser", 10 | "ArgField", 11 | ] 12 | -------------------------------------------------------------------------------- /argdantic/convert.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | from pathlib import Path 3 | from typing import Any, Dict, Generator, Optional, Tuple, Type, cast, get_args 4 | 5 | from pydantic import BaseModel 6 | from pydantic.fields import FieldInfo 7 | from pydantic.v1.utils import lenient_issubclass 8 | from pydantic_core import PydanticUndefined 9 | 10 | from argdantic.parsing import ActionTracker, Argument, PrimitiveArgument, registry 11 | from argdantic.sources import DEFAULT_SOURCE_FIELD 12 | from argdantic.utils import get_optional_type, is_optional 13 | 14 | 15 | def format_description(description: Optional[str], has_default: bool, is_required: bool) -> str: 16 | """Formats the field description, adding additional info about defaults and if it is required. 17 | 18 | Args: 19 | description (str): The description. 20 | has_default (bool): If the field has a default value. 21 | is_required (bool): If the field is required. 22 | 23 | Returns: 24 | str: The formatted description. 25 | """ 26 | suffix = None 27 | # if it already has a default, it is not required 28 | if is_required: 29 | suffix = "(required)" 30 | elif has_default: 31 | suffix = "(default: %(default)s)" 32 | # handle cases: 33 | # - when there is no prefix, return the description as is (also handles None) 34 | # - when there is no description, return the prefix as is (also handles None) 35 | # - when there is both, return the prefix and description 36 | if suffix is None: 37 | return description or "" 38 | if description is None: 39 | return suffix 40 | return f"{description} {suffix}" 41 | 42 | 43 | def argument_from_field( 44 | field_info: FieldInfo, 45 | kebab_name: str, 46 | delimiter: str, 47 | internal_delimiter: str, 48 | parent_path: Tuple[str, ...], 49 | custom_identifier: Optional[str] = None, 50 | ) -> Argument: 51 | """Converts a pydantic field to a single argument. 52 | 53 | Args: 54 | field (Field): The field to convert. 55 | kebab_name (str): The kebab case name of the field. 56 | delimiter (str): The delimiter to use for the argument names. 57 | internal_delimiter (str): The delimiter to use for the internal names. 58 | parent_path (Tuple[str, ...]): The parent path of the field. 59 | 60 | Returns: 61 | Argument: The argument. 
62 | """ 63 | # this function should only deal with non-pydantic objects 64 | assert not lenient_issubclass(field_info.annotation, BaseModel) 65 | base_option_name = delimiter.join(parent_path + (kebab_name,)) 66 | full_option_name = f"--{base_option_name}" 67 | extra_fields: Dict[str, Any] = ( 68 | field_info.json_schema_extra or {} if isinstance(field_info.json_schema_extra, dict) else {} 69 | ) 70 | extra_names = extra_fields.get("names", ()) 71 | 72 | # example.test-attribute -> example__test_attribute 73 | identifier = custom_identifier or base_option_name.replace(delimiter, internal_delimiter).replace("-", "_") 74 | # handle optional types, the only case where we currently support Unions 75 | field_type: Type[Any] = field_info.annotation # type: ignore 76 | if is_optional(field_info.annotation): 77 | field_type = get_args(field_info.annotation)[0] 78 | 79 | field_names = (full_option_name, *extra_names) 80 | has_default = field_info.default is not PydanticUndefined and field_info.default is not None 81 | field_default = field_info.default if has_default else argparse.SUPPRESS 82 | description = format_description(field_info.description, has_default, field_info.is_required()) 83 | 84 | arg_class = registry.get(field_type, PrimitiveArgument) 85 | return arg_class( 86 | *field_names, 87 | identifier=identifier, 88 | field_type=field_type, 89 | default=field_default, 90 | required=field_info.is_required(), 91 | description=description, 92 | ) 93 | 94 | 95 | def model_to_args( 96 | model: Type[BaseModel], 97 | delimiter: str, 98 | internal_delimiter: str, 99 | parent_path: Tuple[str, ...] = tuple(), 100 | ) -> Generator[Argument, None, None]: 101 | """Converts a pydantic model to a list of arguments. 102 | 103 | Args: 104 | model (Type[BaseModel]): The model to convert. 105 | delimiter (str): The delimiter to use for the argument names. 106 | internal_delimiter (str): The delimiter to use for the internal names. 107 | parent_path (Tuple[str, ...], optional): The parent path. Defaults to tuple(). 108 | 109 | Returns: 110 | ArgumentParser: The argument parser. 
111 | """ 112 | # iterate over fields in the settings 113 | for field_name, field_info in model.model_fields.items(): 114 | # checks on delimiters to be done 115 | kebab_name = field_name.replace("_", "-") 116 | assert internal_delimiter not in kebab_name 117 | 118 | annotation = ( 119 | field_info.annotation 120 | if not is_optional(field_info.annotation) 121 | else get_optional_type(field_info.annotation) 122 | ) 123 | if lenient_issubclass(annotation, BaseModel): 124 | yield from model_to_args( 125 | cast(Type[BaseModel], annotation), 126 | delimiter, 127 | internal_delimiter, 128 | parent_path=parent_path + (kebab_name,), 129 | ) 130 | # if the model requires a file source, we add an extra argument 131 | # whose name is the same as the model's name (yes I'm not gonna bother with mypy here) 132 | if hasattr(annotation, "__arg_source_field__") and annotation.__arg_source_field__ is None: # type: ignore 133 | default = PydanticUndefined if annotation.__arg_source_required__ else None # type: ignore 134 | info = FieldInfo( 135 | annotation=Path, 136 | alias=field_info.alias, 137 | title=field_info.title, 138 | description=field_info.description, 139 | default=default, 140 | json_schema_extra=field_info.json_schema_extra, 141 | ) 142 | base_name = delimiter.join(parent_path + (kebab_name,)) 143 | internal_name = base_name.replace(delimiter, internal_delimiter).replace("-", "_") 144 | custom_identifier = f"{internal_name}{internal_delimiter}{DEFAULT_SOURCE_FIELD}" 145 | yield argument_from_field( 146 | field_info=info, 147 | kebab_name=kebab_name, 148 | delimiter=delimiter, 149 | internal_delimiter=internal_delimiter, 150 | parent_path=parent_path, 151 | custom_identifier=custom_identifier, 152 | ) 153 | continue 154 | # simple fields 155 | yield argument_from_field( 156 | field_info=field_info, 157 | kebab_name=kebab_name, 158 | delimiter=delimiter, 159 | internal_delimiter=internal_delimiter, 160 | parent_path=parent_path, 161 | ) 162 | 163 | 164 | def args_to_dict_tree( 165 | kwargs: Dict[str, Any], 166 | internal_delimiter: str, 167 | remove_helpers: bool = True, 168 | cli_trackers: Optional[Dict[str, ActionTracker]] = None, 169 | ) -> Dict[str, Any]: 170 | """Transforms a flat dictionary of identifiers and values back into a complex object made of nested dictionaries. 171 | E.g. the following input: `animal__type='dog', animal__name='roger', animal__owner__name='Mark'` 172 | becomes: `{animal: {name: 'roger', type: 'dog'}, owner: {name: 'Mark'}}` 173 | 174 | Args: 175 | kwargs (Dict[str, Any]): flat dictionary of available fields 176 | internal_delimiter (str): delimiter required to split fields 177 | remove_helpers (bool, optional): whether to remove helper fields (e.g., __func__). Defaults to True. 178 | cli_trackers (Dict[str, ActionTracker], optional): dictionary of action trackers. Defaults to None. 
179 | 180 | Returns: 181 | Dict[str, Any]: nested dictionary of properties to be converted into pydantic models 182 | """ 183 | cli_trackers = cli_trackers or {} 184 | result: Dict[str, Any] = dict() 185 | for name, value in kwargs.items(): 186 | if remove_helpers and name.startswith("__"): 187 | continue 188 | if tracker := cli_trackers.get(name): 189 | if not tracker.is_set(): 190 | continue 191 | # split full name into parts 192 | parts = name.split(internal_delimiter) 193 | # create nested dicts corresponding to each part 194 | # test__inner__value -> {test: {inner: value}} 195 | nested = result 196 | for part in parts[:-1]: 197 | if part not in nested: 198 | nested[part] = dict() 199 | nested = nested[part] 200 | nested[parts[-1]] = value 201 | return dict(result) 202 | -------------------------------------------------------------------------------- /argdantic/core.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | from argparse import ArgumentParser, Namespace, _SubParsersAction 3 | from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, Sequence, Type, TypeVar, cast, get_type_hints 4 | 5 | from pydantic import BaseModel, ValidationError, create_model 6 | from pydantic.v1.utils import lenient_issubclass 7 | from pydantic_settings import BaseSettings 8 | from pydantic_settings.sources import PydanticBaseSettingsSource 9 | 10 | from argdantic.convert import args_to_dict_tree, model_to_args 11 | from argdantic.parsing import Argument 12 | from argdantic.parsing.arguments import ActionTracker 13 | from argdantic.stores import SettingsStoreCallable 14 | 15 | SettingSourceCallable = Callable[[Type[BaseSettings]], PydanticBaseSettingsSource] 16 | ParserType = TypeVar("ParserType", bound="ArgParser") 17 | 18 | 19 | class Command: 20 | """ 21 | A command represents a single function that can be invoked from the command line. 22 | It is composed of a callback function, a list of arguments, and a pydantic model 23 | that is used to validate the arguments. 24 | """ 25 | 26 | def __init__( 27 | self, 28 | callback: Callable, 29 | arguments: Iterable[Argument], 30 | model_class: Type[BaseModel], 31 | name: str, 32 | description: Optional[str] = None, 33 | singleton: bool = False, 34 | stores: Optional[List[SettingsStoreCallable]] = None, 35 | delimiter: str = "__", 36 | ) -> None: 37 | assert callback is not None, "Callback must be a callable object" 38 | self.name = name 39 | self.description = description 40 | self.singleton = singleton 41 | self.callback = callback 42 | self.model_class = model_class 43 | self.delimiter = delimiter 44 | self.arguments = arguments or [] 45 | self.stores = stores or [] 46 | self.trackers: Dict[str, ActionTracker] = {} 47 | 48 | def __repr__(self) -> str: 49 | return f"<Command {self.name}>" 50 | 51 | def __call__(self, args: Namespace) -> Any: 52 | """ 53 | Invoke the command with the arguments already parsed by argparse, 54 | and return the result, which is the return value of the callback. 55 | Arguments are converted into a dictionary and passed to the pydantic model 56 | for validation. The validated model is then passed to the callback. 57 | 58 | Args: 59 | args (Namespace): parsed arguments provided by argparse. 60 | 61 | Returns: 62 | Any: return value of the callback.
63 | """ 64 | # transform the arguments into a dictionary tree for validation 65 | kwargs = vars(args) 66 | raw_data = args_to_dict_tree( 67 | kwargs, 68 | internal_delimiter=self.delimiter, 69 | remove_helpers=True, 70 | cli_trackers=self.trackers, 71 | ) 72 | # validate the arguments against the pydantic model 73 | validated = self.model_class(**raw_data) 74 | # store the validated arguments in the settings stores 75 | for store in self.stores: 76 | store(validated) 77 | # invoke the callback with the validated arguments 78 | # if the command is a singleton, pass the model as a single argument 79 | if self.singleton: 80 | return self.callback(validated) 81 | destructured = {k: getattr(validated, k) for k in validated.model_fields.keys()} 82 | return self.callback(**destructured) 83 | 84 | def build(self, parser: ArgumentParser) -> None: 85 | """ 86 | Build the command by adding all arguments to the parser. 87 | 88 | Args: 89 | parser (ArgumentParser): parser to add the arguments to. 90 | """ 91 | for argument in self.arguments: 92 | tracker = argument.build(parser=parser) 93 | self.trackers[argument.identifier] = tracker 94 | parser.set_defaults(__func__=self) 95 | 96 | 97 | class ArgParser(Generic[ParserType]): 98 | """ 99 | A parser is a collection of commands and subparsers. 100 | It is responsible for building the entrypoint for the command line interface, 101 | and invoking the correct command by constructing the parser hierarchy. 102 | """ 103 | 104 | def __init__( 105 | self, 106 | name: Optional[str] = None, 107 | description: Optional[str] = None, 108 | force_group: bool = False, 109 | delimiter: str = ".", 110 | internal_delimiter: str = "__", 111 | subcommand_meta: str = "<command>", 112 | ) -> None: 113 | self.entrypoint: Optional[ArgumentParser] = None 114 | self.name = name 115 | self.description = description 116 | self.force_group = force_group 117 | self.commands: List[Command] = [] 118 | self.groups: List[ParserType] = [] 119 | # internal variables 120 | assert internal_delimiter.isidentifier(), ( 121 | f"The internal delimiter {internal_delimiter} is not a valid identifier" 122 | ) 123 | 124 | self._delimiter = delimiter 125 | self._internal_delimiter = internal_delimiter 126 | self._subcommand_meta = subcommand_meta 127 | # keeping a reference to subparser is necessary to add subparsers 128 | # Each cli level can only have one subparser. 129 | self._subparser: Optional[_SubParsersAction] = None 130 | 131 | def __repr__(self) -> str: 132 | name = f" '{self.name}'" if self.name else "" 133 | return f"<Parser{name}>" 134 | 135 | def __call__(self, args: Optional[Namespace] = None) -> Any: 136 | """ 137 | Invoke the parser by building the entrypoint and parsing the arguments. 138 | The result is the return value of the callback of the invoked command. 139 | 140 | Args: 141 | args (Sequence[Any], optional): arguments to parse. Defaults to None. 142 | 143 | Returns: 144 | Any: return value of the callback. 145 | """ 146 | if self.entrypoint is None: 147 | self.entrypoint = self._build_entrypoint() 148 | try: 149 | args = self.entrypoint.parse_args(cast(Sequence[str], args)) 150 | return args.__func__(args) 151 | except ValidationError as e: 152 | self.entrypoint.error(self._format_validation_error(e)) 153 | 154 | def _format_validation_error(self, exception: ValidationError) -> str: 155 | """ 156 | Format a validation error into a string, providing a more human-readable representation. 157 | 158 | Args: 159 | exception (ValidationError): validation error raised by pydantic.
160 | 161 | Returns: 162 | str: formatted string. 163 | """ 164 | errors = exception.errors() 165 | num_errors = len(errors) 166 | intro = f"{num_errors} validation {'error' if num_errors == 1 else 'errors'} while parsing arguments\n" 167 | body = [] 168 | max_len = 0 169 | for error in errors: 170 | location = " -> ".join(str(e) for e in error["loc"]) 171 | max_len = max(max_len, len(location)) 172 | body.append((location, error["msg"])) 173 | body_str = "\n".join(f"{location:<{max_len}}: {msg}" for location, msg in body) 174 | return f"{intro}{body_str}" 175 | 176 | def _get_subparser( 177 | self, 178 | parser: ArgumentParser, 179 | *, 180 | destination: str = "group", 181 | ) -> _SubParsersAction: 182 | """ 183 | Get the subparser for the current parser. If it does not exist, create it. 184 | 185 | Args: 186 | destination (str, optional): destination of the subparser. Defaults to "group". 187 | 188 | Returns: 189 | _SubParsersAction: subparser. 190 | """ 191 | if self._subparser is None: 192 | self._subparser = parser.add_subparsers(dest=destination, required=True, metavar=self._subcommand_meta) 193 | return self._subparser 194 | 195 | def _build_entrypoint(self, parser: Optional[ArgumentParser] = None, level: int = 0) -> ArgumentParser: 196 | """ 197 | Construct the entrypoint for the command line interface. This is a recursive 198 | function that builds the entrypoint for the current parser and all subparsers. 199 | 200 | Args: 201 | parser (ArgumentParser, optional): Current parser to pass around. Defaults to None. 202 | 203 | Returns: 204 | ArgumentParser: the main parser to be invoked as root. 205 | """ 206 | assert self.commands or self.groups, "Parser must have at least one command or group of commands" 207 | # if the root parser is not provided, create a new one 208 | # else, create a subparser for the current parser 209 | if parser is None: 210 | parser = ArgumentParser(prog=self.name, description=self.description) 211 | 212 | # then build the entrypoint for the current parser 213 | if len(self.commands) == 1 and not self.groups and not self.force_group: 214 | parser.description = self.commands[0].description 215 | self.commands[0].build(parser=parser) 216 | else: 217 | subparsers = self._get_subparser(parser, destination=f"__group{level}__") 218 | for command in self.commands: 219 | subparser = subparsers.add_parser(command.name, help=command.description) 220 | command.build(parser=subparser) 221 | 222 | # last, build the entrypoint for all subparsers 223 | for group in self.groups: 224 | sublevel = level + 1 225 | subparser = self._get_subparser(parser, destination=f"__group{sublevel}__") 226 | group._build_entrypoint( 227 | parser=subparser.add_parser(group.name, help=group.description), 228 | level=sublevel, 229 | ) 230 | return parser 231 | 232 | def command( 233 | self, 234 | name: Optional[str] = None, 235 | help: Optional[str] = None, 236 | sources: Optional[List[SettingSourceCallable]] = None, 237 | stores: Optional[List[SettingsStoreCallable]] = None, 238 | singleton: bool = False, 239 | ) -> Callable: 240 | """Decorator to register a function as a command. 241 | 242 | Args: 243 | name (str, optional): Name of the command. Defaults to the function name when not provided. 244 | help (str, optional): Help text for the command. Defaults to the function docstring when not provided. 245 | sources (list, optional): Settings sources used to provide values for the command arguments. 246 | stores (list, optional): Settings stores used to persist the validated settings. singleton (bool, optional): When True, the callback receives the validated model as a single argument. Defaults to False.
247 | 248 | Returns: 249 | Callable: The same function, promoted to a command. 250 | """ 251 | assert sources is None or isinstance(sources, list), "Sources must be a list of callables" 252 | assert stores is None or isinstance(stores, list), "Stores must be a list of callables" 253 | 254 | def decorator(f: Callable) -> Command: 255 | # create a name or use the provided one 256 | command_name = name or f.__name__.lower().replace("_", "-") 257 | command_help = help or inspect.getdoc(f) 258 | # extract function parameters and prepare list of click params 259 | # assign the same function as callback for empty commands 260 | f.__annotations__ = get_type_hints(f) 261 | func_params = list(inspect.signature(f).parameters.items()) 262 | # if we have a configuration parse it, otherwise handle empty commands 263 | # wrap everything into a wrapper model, so that multiple inputs can be provided 264 | arguments = None 265 | model_class = None 266 | wrapped_fields = dict() 267 | if func_params: 268 | # if the function expects a single argument, we do not wrap it 269 | # otherwise, we prepare the fields for the wrapper model 270 | if singleton: 271 | assert len(func_params) == 1, ( 272 | f"The command '{command_name}' expects a single argument, but {len(func_params)} were provided" 273 | ) 274 | param_name, param = func_params[0] 275 | assert lenient_issubclass(param.annotation, BaseModel), ( 276 | f"The singleton argument '{param_name}' must be a pydantic model" 277 | ) 278 | model_class = param.annotation 279 | 280 | else: 281 | for param_name, param in func_params: 282 | assert param.annotation is not inspect.Parameter.empty, ( 283 | f"Field '{param_name}' lacks type annotations" 284 | ) 285 | default_value = param.default if param.default is not inspect.Parameter.empty else Ellipsis 286 | wrapped_fields[param_name] = (param.annotation, default_value) 287 | 288 | # set the base Model and Config class 289 | if sources: 290 | 291 | class StaticSourceSettings(BaseSettings): 292 | # patch the config class so that pydantic functionality remains 293 | # the same, but the sources are properly initialized 294 | 295 | @classmethod 296 | def settings_customise_sources( 297 | cls, 298 | settings_cls: Type[BaseSettings], 299 | init_settings: PydanticBaseSettingsSource, 300 | env_settings: PydanticBaseSettingsSource, 301 | dotenv_settings: PydanticBaseSettingsSource, 302 | file_secret_settings: PydanticBaseSettingsSource, 303 | ): 304 | # cheeky way to harmonize the sources inside the config class: 305 | # this is needed to make sure that the config class is properly 306 | # initialized with the sources declared by the user on CLI init. 307 | # Env and file sources are discarded, the user must provide them explicitly. 
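# Note: following pydantic-settings semantics, sources earlier in the returned tuple take precedence,
# so values parsed from the CLI (init_settings) override those coming from the user-declared sources below.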
308 | source_list = cast(List[SettingSourceCallable], sources) 309 | callables = [source(settings_cls) for source in source_list] 310 | return (init_settings, *callables) 311 | 312 | model_class = StaticSourceSettings if model_class is None else (model_class, StaticSourceSettings) 313 | 314 | cfg_class = create_model( # type: ignore 315 | "WrapperModel", 316 | **wrapped_fields, 317 | __base__=model_class, 318 | ) 319 | arguments = model_to_args(cfg_class, self._delimiter, self._internal_delimiter) 320 | 321 | command = Command( 322 | callback=f, 323 | arguments=arguments, 324 | model_class=cfg_class, 325 | name=command_name, 326 | description=command_help, 327 | singleton=singleton, 328 | stores=stores, 329 | delimiter=self._internal_delimiter, 330 | ) 331 | # add command to current CLI list and return it 332 | self.commands.append(command) 333 | return command 334 | 335 | return decorator 336 | 337 | def add_parser(self, parser: ParserType, name: Optional[str] = None) -> None: 338 | """ 339 | Add a subparser to the current parser. 340 | 341 | Args: 342 | parser (ArgParser): subparser to add. 343 | """ 344 | assert parser.name or name, "The given subparser must have a name" 345 | if name: 346 | parser.name = name 347 | self.groups.append(parser) 348 | -------------------------------------------------------------------------------- /argdantic/fields.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Optional, Sequence 2 | 3 | from pydantic import Field 4 | 5 | 6 | def ArgField( 7 | *names: Optional[Sequence[str]], 8 | default: Optional[Any] = ..., 9 | description: Optional[str] = None, 10 | **extra: Any, 11 | ) -> Any: 12 | """Create a FieldInfo object with the given arguments. 13 | 14 | This is a convenience function for creating a FieldInfo object 15 | with the given arguments. It is used to create the default 16 | FieldInfo object for each field in a model. 17 | 18 | Args: 19 | *names (str, optional): Additional optional names for the current field. 20 | default (Any, optional): The default value of the argument, empty by default. 21 | description: The description of the argument. 22 | **extra: Extra keyword arguments, see the pydantic Field function for more info. 23 | 24 | Returns: 25 | A FieldInfo object with the given arguments. 
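Example (illustrative; the field and flag names are arbitrary): `rate: float = ArgField("-r", description="learning rate", default=0.01)` exposes an extra `-r` flag alongside the automatically generated `--rate` option.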
26 | """ 27 | json_schema_extra = extra.pop("json_schema_extra", {}) 28 | json_schema_extra["names"] = names 29 | return Field(default, description=description, json_schema_extra=json_schema_extra, **extra) 30 | -------------------------------------------------------------------------------- /argdantic/parsing/__init__.py: -------------------------------------------------------------------------------- 1 | from argdantic.parsing.arguments import ( 2 | ActionTracker, 3 | Argument, 4 | ChoiceArgument, 5 | DictArgument, 6 | FlagArgument, 7 | MultiActionTracker, 8 | MultipleArgument, 9 | PrimitiveArgument, 10 | registry, 11 | ) 12 | 13 | __all__ = [ 14 | "ActionTracker", 15 | "Argument", 16 | "ChoiceArgument", 17 | "DictArgument", 18 | "FlagArgument", 19 | "MultipleArgument", 20 | "PrimitiveArgument", 21 | "MultiActionTracker", 22 | "registry", 23 | ] 24 | -------------------------------------------------------------------------------- /argdantic/parsing/actions.py: -------------------------------------------------------------------------------- 1 | from argparse import OPTIONAL, Action, ArgumentParser, Namespace, _copy_items # type: ignore 2 | from typing import Any, Iterable, Optional, Sequence, Union 3 | 4 | 5 | class StoreAction(Action): 6 | """ 7 | Store action for argparse. This class is used to store the value of an argument. 8 | This thin wrapper around the argparse Action is used to track if a field has been explicitly set or not. 9 | """ 10 | 11 | def __init__( 12 | self, 13 | option_strings: Sequence[str], 14 | dest: str, 15 | nargs: Optional[Union[int, str]] = None, 16 | **kwargs, 17 | ) -> None: 18 | super().__init__(option_strings=option_strings, dest=dest, nargs=nargs, **kwargs) 19 | self._specified = False 20 | 21 | def __call__( 22 | self, 23 | parser: ArgumentParser, 24 | namespace: Namespace, 25 | values: Optional[Union[Sequence[Any], str]], 26 | option_string: Optional[str] = None, 27 | ) -> None: 28 | setattr(namespace, self.dest, values) 29 | self._specified = True 30 | 31 | @property 32 | def specified(self) -> bool: 33 | return self._specified 34 | 35 | 36 | class StoreConstAction(StoreAction): 37 | def __init__( 38 | self, 39 | option_strings: Sequence[str], 40 | dest: str, 41 | const: Any, 42 | nargs: Union[int, str] = 0, 43 | **kwargs: dict, 44 | ) -> None: 45 | nargs = 0 46 | super().__init__(option_strings, dest, nargs=nargs, const=const, **kwargs) 47 | 48 | def __call__( 49 | self, 50 | parser: ArgumentParser, 51 | namespace: Namespace, 52 | values: Optional[Union[Sequence[Any], str]], 53 | option_string: Optional[str] = None, 54 | ) -> None: 55 | return super().__call__(parser, namespace, self.const, option_string) 56 | 57 | 58 | class StoreTrueAction(StoreConstAction): 59 | def __init__(self, option_strings: Sequence[str], dest: str, **kwargs: Any) -> None: 60 | super().__init__( 61 | option_strings, 62 | dest, 63 | const=True, 64 | **kwargs, 65 | ) 66 | 67 | 68 | class StoreFalseAction(StoreConstAction): 69 | def __init__(self, option_strings: Sequence[str], dest: str, **kwargs: Any) -> None: 70 | super().__init__( 71 | option_strings, 72 | dest, 73 | const=False, 74 | **kwargs, 75 | ) 76 | 77 | 78 | class AppendAction(StoreAction): 79 | def __init__( 80 | self, 81 | option_strings: Sequence[str], 82 | dest: str, 83 | nargs: Optional[Union[int, str]], 84 | const: Optional[Any] = None, 85 | **kargs: dict, 86 | ) -> None: 87 | if nargs == 0: 88 | raise ValueError("nargs for append actions must be > 0; if arg is optional, use const") 89 | if const is not None and 
nargs != OPTIONAL: 90 | raise ValueError("nargs must be %r to supply const" % OPTIONAL) 91 | super().__init__( 92 | option_strings=option_strings, 93 | dest=dest, 94 | nargs=nargs, 95 | const=const, 96 | **kargs, 97 | ) 98 | 99 | def __call__( 100 | self, 101 | parser: ArgumentParser, 102 | namespace: Namespace, 103 | values: Optional[Union[str, Sequence[Any]]], 104 | option_string: Optional[str] = None, 105 | ) -> None: 106 | items = getattr(namespace, self.dest, None) 107 | items = list(_copy_items(items)) 108 | assert isinstance(values, Iterable) 109 | items.extend(values) 110 | super().__call__(parser, namespace, items, option_string) 111 | -------------------------------------------------------------------------------- /argdantic/parsing/arguments.py: -------------------------------------------------------------------------------- 1 | import json 2 | from abc import ABC, abstractmethod 3 | from argparse import ArgumentParser, ArgumentTypeError 4 | from collections import abc, deque 5 | from enum import Enum 6 | from typing import ( 7 | Any, 8 | Dict, 9 | Iterator, 10 | List, 11 | Literal, 12 | Mapping, 13 | Optional, 14 | Sequence, 15 | Tuple, 16 | Type, 17 | Union, 18 | cast, 19 | get_args, 20 | get_origin, 21 | ) 22 | 23 | from argdantic.parsing.actions import ( 24 | Action, 25 | AppendAction, 26 | StoreAction, 27 | StoreFalseAction, 28 | StoreTrueAction, 29 | ) 30 | from argdantic.registry import Registry 31 | from argdantic.utils import is_container, type_name 32 | 33 | registry = Registry() 34 | 35 | cli_types: Dict[str, Tuple[Any, str]] = { 36 | "bool": (bool, "BOOL"), 37 | "int": (int, "INT"), 38 | "float": (float, "FLOAT"), 39 | "complex": (complex, "COMPLEX"), 40 | "bytes": (str.encode, "BYTES"), 41 | "str": (str, "TEXT"), 42 | "dict": (dict, "JSON"), 43 | "datetime": (str, "DATETIME"), 44 | "date": (str, "DATE"), 45 | "time": (str, "TIME"), 46 | "timedelta": (str, "TIMEDELTA"), 47 | "path": (str, "PATH"), 48 | "email": (str, "EMAIL"), 49 | } 50 | cli_default = (str, "TEXT") 51 | 52 | 53 | class ActionTracker: 54 | """ 55 | Action tracker for argparse actions. This class is used to track if an action has been 56 | specified or not. This is useful for determining if an argument has been set or not using the CLI. 57 | """ 58 | 59 | def __init__(self, action_class: Type[Action]) -> None: 60 | self.action_class = action_class 61 | self.action: Optional[Action] = None 62 | 63 | def __call__(self, option_strings: Sequence[str], dest: str, **kwargs) -> Any: 64 | self.action = self.action_class(option_strings, dest, **kwargs) 65 | return self.action 66 | 67 | def is_set(self) -> bool: 68 | return self.action is not None and self.action.specified # type: ignore 69 | 70 | 71 | class MultiActionTracker(ActionTracker): 72 | """ 73 | Multi action tracker for argparse actions. This class is used to track if an action has been 74 | specified or not. Compared to the ActionTracker, this class is used for actions that can be 75 | specified multiple times. 76 | """ 77 | 78 | def __init__(self, trackers: List[ActionTracker]) -> None: 79 | self.trackers = trackers 80 | 81 | def is_set(self) -> bool: 82 | return any(tracker.is_set() for tracker in self.trackers) 83 | 84 | 85 | class Argument(ABC): 86 | """ 87 | Base class for all argument types. This class is not meant to be used directly, but rather 88 | subclassed to create new argument types. 
89 | """ 90 | 91 | def __init__( 92 | self, 93 | *field_names: str, 94 | identifier: str, 95 | field_type: Type[Any], 96 | default: Any = None, 97 | required: bool = True, 98 | description: Optional[str] = None, 99 | ) -> None: 100 | super().__init__() 101 | self.identifier = identifier 102 | self.field_names = field_names 103 | self.field_type = field_type 104 | self.default = default 105 | self.required = required 106 | self.description = description 107 | 108 | @abstractmethod 109 | def build(self, parser: ArgumentParser) -> ActionTracker: 110 | raise NotImplementedError # pragma: no cover 111 | 112 | def build_internal(self, parser: ArgumentParser, *, action: Type[Action], **optional_fields: Any) -> ActionTracker: 113 | tracker = ActionTracker(action) 114 | parser.add_argument( 115 | *self.field_names, 116 | dest=self.identifier, 117 | default=self.default, 118 | required=self.required, 119 | help=self.description, 120 | action=tracker, # type: ignore 121 | **optional_fields, 122 | ) 123 | return tracker 124 | 125 | 126 | class PrimitiveArgument(Argument): 127 | """ 128 | Argument for primitive types, assigned as default argument type 129 | """ 130 | 131 | def build(self, parser: ArgumentParser) -> ActionTracker: 132 | cli_type, cli_metavar = cli_types.get(type_name(self.field_type), cli_default) 133 | return super().build_internal( 134 | parser, 135 | action=StoreAction, 136 | type=cli_type, 137 | metavar=cli_metavar, 138 | ) 139 | 140 | 141 | @registry.register(bool) 142 | class FlagArgument(Argument): 143 | """ 144 | Argument for a boolean flag. If the flag is present, the value is True, otherwise False 145 | """ 146 | 147 | def build(self, parser: ArgumentParser) -> ActionTracker: 148 | # create a group with two mutually exclusive arguments 149 | group = parser.add_mutually_exclusive_group(required=self.required) 150 | # create a tracker for each argument, then create a multi-tracker to track both 151 | tracker_true = ActionTracker(StoreTrueAction) 152 | tracker_false = ActionTracker(StoreFalseAction) 153 | tracker = MultiActionTracker([tracker_true, tracker_false]) 154 | # add the arguments to the group and set the default value, if any 155 | negative_field_names = [f"--no-{name.lstrip('-')}" for name in self.field_names] 156 | group.add_argument( 157 | *self.field_names, 158 | dest=self.identifier, 159 | action=cast(Type[Action], tracker_true), 160 | help=self.description, 161 | ) # type: ignore 162 | group.add_argument( 163 | *negative_field_names, 164 | dest=self.identifier, 165 | action=cast(Type[Action], tracker_false), 166 | ) # type: ignore 167 | default = self.default if self.default is not None else False 168 | parser.set_defaults(**{self.identifier: default}) 169 | # return the multi-tracker 170 | return tracker 171 | 172 | 173 | @registry.register( 174 | list, 175 | tuple, 176 | set, 177 | frozenset, 178 | range, 179 | deque, 180 | abc.Sequence, 181 | abc.Iterable, 182 | ) 183 | class MultipleArgument(Argument): 184 | """ 185 | Argument that accepts multiple values. 186 | When the field type is a container, the inner type is used to determine the type of the argument. 187 | For example, a field type of List[int] will result in an argument that accepts multiple integers. 
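Composite annotations such as Tuple[str, int] instead map to a fixed number of values (nargs equal to the number of type arguments), rendered in the help as TEXT INT.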
188 | """ 189 | 190 | def _type_and_count(self) -> Tuple[Type[Any], Union[str, int], Any]: 191 | inner_type = str 192 | arg_count: Union[str, int] = "+" if self.required else "*" 193 | metavar: Union[tuple, str] = cli_types[type_name(inner_type)] 194 | if is_container(self.field_type): 195 | # A non-composite type has a single argument, such as 'List[int]' 196 | # A composite type has a tuple of arguments, like 'Tuple[str, int, int]'. 197 | args = get_args(self.field_type) 198 | if len(args) == 1 or (len(args) == 2 and args[1] is Ellipsis): 199 | inner_type, metavar = cli_types.get(type_name(args[0]), cli_default) 200 | elif len(args) >= 2: 201 | arg_count = len(args) 202 | metavar = tuple([cli_types.get(type_name(arg), cli_default)[1] for arg in args]) 203 | return inner_type, arg_count, metavar 204 | 205 | def build(self, parser: ArgumentParser) -> ActionTracker: 206 | field_type, nargs, metavar = self._type_and_count() 207 | return super().build_internal( 208 | parser, 209 | action=AppendAction, 210 | type=field_type, 211 | nargs=nargs, 212 | metavar=metavar, 213 | ) 214 | 215 | 216 | @registry.register( 217 | Literal, 218 | Enum, 219 | ) 220 | class ChoiceArgument(Argument): 221 | """ 222 | ChoiceArgument is a special case of MultipleArgument that has a fixed number of choices. 223 | It supports both Enum and Literal types. Overriding the *contains* and *iter* methods allows 224 | to use the very same class as a custom choices argument for argparse. 225 | """ 226 | 227 | def __contains__(self, item: Any) -> bool: 228 | # The control is done after the `convert` method, 229 | # so the item is already a value or an Enum member. 230 | item_set = {i.value for i in self.field_type} 231 | key = item if self.value_only else item.value 232 | return key in item_set 233 | 234 | def __iter__(self) -> Iterator: 235 | return iter(self.field_type) # type: ignore 236 | 237 | def __next__(self) -> Any: 238 | return next(iter(self.field_type)) # type: ignore 239 | 240 | def __len__(self) -> int: 241 | return len(self.field_type) # type: ignore 242 | 243 | def __repr__(self) -> str: 244 | str_choices = [str(i.value) if self.value_only else i.name for i in self.field_type] 245 | return f"[{'|'.join(str_choices)}]" 246 | 247 | def convert(self, name: Any) -> Any: 248 | try: 249 | item = self.field_type[name] 250 | except KeyError: 251 | raise ArgumentTypeError(f"invalid choice: {name} (choose from {repr(self)})") 252 | if self.value_only: 253 | return item.value 254 | return item 255 | 256 | def build(self, parser: ArgumentParser) -> ActionTracker: 257 | self.value_only = False 258 | if get_origin(self.field_type) is Literal: 259 | self.field_type = Enum(self.identifier, {str(v): v for v in get_args(self.field_type)}) # type: ignore 260 | self.value_only = True 261 | return super().build_internal( 262 | parser, 263 | action=StoreAction, 264 | type=self.convert, 265 | metavar=repr(self), 266 | choices=self, 267 | ) 268 | 269 | 270 | @registry.register( 271 | dict, 272 | Dict, 273 | Mapping, 274 | ) 275 | class DictArgument(Argument): 276 | """ 277 | Argument for a dictionary type. The value is a JSON string. 
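For example (with an illustrative option name), --extras '{"lr": 0.1, "debug": true}' is parsed with json.loads into a Python dict before model validation.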
278 | """ 279 | 280 | def build(self, parser: ArgumentParser) -> ActionTracker: 281 | _, metavar = cli_types[type_name(dict)] 282 | return super().build_internal( 283 | parser, 284 | action=StoreAction, 285 | type=json.loads, 286 | metavar=metavar, 287 | ) 288 | -------------------------------------------------------------------------------- /argdantic/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/edornd/argdantic/9e9d7340d4789ba2617baf323504b8fd14396a99/argdantic/py.typed -------------------------------------------------------------------------------- /argdantic/registry.py: -------------------------------------------------------------------------------- 1 | from collections.abc import MutableMapping 2 | from typing import Any, Dict, Iterator, Type, Union, get_origin 3 | 4 | 5 | class Registry(MutableMapping): 6 | """Simple class registry for mapping types and their argument handlers.""" 7 | 8 | def __init__(self) -> None: 9 | self.store: Dict[type, Any] = dict() 10 | 11 | def __getitem__(self, key: type) -> Any: 12 | # do not allow Union types (unless they are Optional, handled in conversion) 13 | if get_origin(key) is Union: 14 | raise ValueError("Union types are not supported, please specify a single type.") 15 | try: 16 | hierarchy = key.mro()[:-1] 17 | # avoid look-up errors for non-classes (Literals, etc.) 18 | except AttributeError: 19 | origin = get_origin(key) 20 | hierarchy = [origin] if origin else [key] 21 | for type_class in hierarchy: 22 | if type_class in self.store: 23 | return self.store[type_class] 24 | 25 | def __setitem__(self, key: type, value: Any) -> None: 26 | return self.store.__setitem__(key, value) 27 | 28 | def __delitem__(self, key: type) -> None: 29 | return self.store.__delitem__(key) 30 | 31 | def __iter__(self) -> Iterator[Any]: 32 | return self.store.__iter__() 33 | 34 | def __len__(self) -> int: 35 | return self.store.__len__() 36 | 37 | def get(self, key: type, default: Any = None) -> Any: 38 | value = self[key] or default 39 | return value 40 | 41 | def register(self, *keys: Any): 42 | assert keys is not None and len(keys) > 0, "Keys required!" 
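# every provided key maps to the same handler class; registering a type again overwrites the previous handler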
43 | 44 | def decorator(cls: Type[Any]): 45 | for key in keys: 46 | self.store[key] = cls 47 | return cls 48 | 49 | return decorator 50 | -------------------------------------------------------------------------------- /argdantic/sources/__init__.py: -------------------------------------------------------------------------------- 1 | from argdantic.sources.base import EnvSettingsSource, SecretsSettingsSource 2 | from argdantic.sources.dynamic import DEFAULT_SOURCE_FIELD, from_file 3 | from argdantic.sources.json import JsonFileLoader, JsonSettingsSource 4 | from argdantic.sources.toml import TomlFileLoader, TomlSettingsSource 5 | from argdantic.sources.yaml import YamlFileLoader, YamlSettingsSource 6 | 7 | __all__ = [ 8 | "from_file", 9 | "DEFAULT_SOURCE_FIELD", 10 | "EnvSettingsSource", 11 | "SecretsSettingsSource", 12 | "JsonSettingsSource", 13 | "TomlSettingsSource", 14 | "YamlSettingsSource", 15 | "JsonFileLoader", 16 | "TomlFileLoader", 17 | "YamlFileLoader", 18 | ] 19 | -------------------------------------------------------------------------------- /argdantic/sources/base.py: -------------------------------------------------------------------------------- 1 | import os 2 | from abc import ABC, abstractmethod 3 | from pathlib import Path 4 | from typing import Dict, Mapping, Optional, Type, Union, cast 5 | 6 | from pydantic_settings import BaseSettings, PydanticBaseSettingsSource 7 | from pydantic_settings.sources import DotEnvSettingsSource as PydanticEnvSource 8 | from pydantic_settings.sources import DotenvType 9 | from pydantic_settings.sources import SecretsSettingsSource as PydanticSecretsSource 10 | 11 | 12 | class SettingsSourceBuilder(ABC): 13 | """ 14 | An argdantic source is a callable object that takes an input settings object 15 | and returns an actual source, required to postpone the init of default sources. 16 | """ 17 | 18 | @abstractmethod 19 | def __call__(self, settings_cls: Type[BaseSettings]) -> PydanticBaseSettingsSource: 20 | raise NotImplementedError # pragma: no cover 21 | 22 | 23 | class FileSettingsSourceBuilder(SettingsSourceBuilder): 24 | """ 25 | A file source builder is a callable object that takes an input file path 26 | to read settings from, and returns an instance of settings source that 27 | can be used to populate to a pydantic model. 28 | """ 29 | 30 | def __init__(self, path: Union[str, Path]) -> None: 31 | self.path = Path(path) 32 | 33 | 34 | class FileBaseSettingsSource(PydanticBaseSettingsSource): 35 | """ 36 | Abstract settings source that expects an extra path, together with the settings class. 37 | """ 38 | 39 | def __init__(self, settings_cls: Type[BaseSettings], path: Union[str, Path]) -> None: 40 | super().__init__(settings_cls) 41 | self.path = Path(path) 42 | 43 | 44 | class PydanticMultiEnvSource(PydanticEnvSource): 45 | """ 46 | A pydantic settings source that loads settings from multiple environment sources. 47 | This loads from both the environment variables and the dotenv file. 
48 | """ 49 | 50 | def _load_env_vars(self) -> Mapping[str, Union[str, None]]: 51 | if self.case_sensitive: 52 | env_vars = cast(Dict[str, str], os.environ) 53 | else: 54 | self.env_prefix: str = self.env_prefix.lower() 55 | env_vars = {k.lower(): v for k, v in os.environ.items()} 56 | # filter out env vars that are not fields in the settings class 57 | valid_vars = {} 58 | for field_name in self.settings_cls.model_fields: 59 | expected = f"{self.env_prefix}{field_name}" 60 | # keep them with the prefix, it will be removed later 61 | if expected in env_vars: 62 | valid_vars[expected] = env_vars[expected] 63 | add_vars = super()._load_env_vars() 64 | valid_vars.update(cast(dict, add_vars)) 65 | return valid_vars 66 | 67 | 68 | class EnvSettingsSource(SettingsSourceBuilder): 69 | """ 70 | Reads settings from environment variables. 71 | This class inherits from the pydantic EnvSettingsSource class to fully customize input sources. 72 | """ 73 | 74 | def __init__( 75 | self, 76 | env_file: Optional[DotenvType], 77 | env_file_encoding: Optional[str] = "utf-8", 78 | env_nested_delimiter: Optional[str] = "__", 79 | env_prefix: str = "", 80 | env_case_sensitive: bool = False, 81 | ): 82 | self.env_file = env_file 83 | self.env_file_encoding = env_file_encoding 84 | self.env_nested_delimiter = env_nested_delimiter 85 | self.env_prefix = env_prefix 86 | self.env_case_sensitive = env_case_sensitive 87 | 88 | def __call__(self, settings_cls: Type[BaseSettings]) -> PydanticBaseSettingsSource: 89 | return PydanticMultiEnvSource( 90 | settings_cls=settings_cls, 91 | env_file=self.env_file, 92 | env_file_encoding=self.env_file_encoding, 93 | case_sensitive=self.env_case_sensitive, 94 | env_prefix=self.env_prefix, 95 | env_nested_delimiter=self.env_nested_delimiter, 96 | ) 97 | 98 | 99 | class SecretsSettingsSource(SettingsSourceBuilder): 100 | """Reads secrets from the given directory. 101 | This class inherits from the pydantic SecretsSettingsSource class to fully customize input sources. 102 | """ 103 | 104 | def __init__(self, secrets_dir: Optional[Union[str, Path]], case_sensitive: bool = False, env_prefix: str = ""): 105 | self.secrets_dir = secrets_dir 106 | self.case_sensitive = case_sensitive 107 | self.env_prefix = env_prefix 108 | 109 | def __call__(self, settings_cls: Type[BaseSettings]) -> PydanticBaseSettingsSource: 110 | return PydanticSecretsSource( 111 | settings_cls=settings_cls, 112 | secrets_dir=self.secrets_dir, 113 | case_sensitive=self.case_sensitive, 114 | env_prefix=self.env_prefix, 115 | ) 116 | -------------------------------------------------------------------------------- /argdantic/sources/dynamic.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import Any, Dict, Optional, Tuple, Type, cast 3 | 4 | from pydantic import BaseModel 5 | from pydantic_settings import BaseSettings, InitSettingsSource, PydanticBaseSettingsSource 6 | 7 | from argdantic.sources.base import FileBaseSettingsSource 8 | from argdantic.utils import is_mapping 9 | 10 | DEFAULT_SOURCE_FIELD = "_source" 11 | 12 | 13 | class DynamicFileSource(PydanticBaseSettingsSource): 14 | """ 15 | Source class for loading values provided during settings class initialization. 
16 | """ 17 | 18 | def __init__( 19 | self, 20 | settings_cls: Type[BaseSettings], 21 | source_cls: Type[FileBaseSettingsSource], 22 | init_kwargs: Dict[str, Any], 23 | required: bool, 24 | field_name: Optional[str] = None, 25 | ): 26 | super().__init__(settings_cls) 27 | self.init_kwargs = init_kwargs 28 | self.field_name = field_name or DEFAULT_SOURCE_FIELD 29 | if self.field_name not in init_kwargs: 30 | self.source = None 31 | else: 32 | self.source = source_cls(settings_cls, init_kwargs[self.field_name]) 33 | 34 | def get_field_value(self, field: Any, field_name: str) -> Tuple[Any, str, bool]: 35 | # Nothing to do here. Only implement the return statement to make mypy happy 36 | return None, "", False # pragma: no cover 37 | 38 | def _update_recursive(self, dict_a: Dict[str, Any], dict_b: Dict[str, Any]) -> Dict[str, Any]: 39 | # update dict_a with dict_b recursively 40 | # for each key in dict_b, if the key is in dict_a, update the value 41 | # if the value is a mapping, update it recursively 42 | # if the key is not in dict_a, add it 43 | for key, value in dict_b.items(): 44 | if key in dict_a: 45 | if is_mapping(type(value)): 46 | dict_a[key] = self._update_recursive(dict_a[key], value) 47 | else: 48 | dict_a[key] = value 49 | else: 50 | dict_a[key] = value 51 | return dict_a 52 | 53 | def __call__(self) -> Dict[str, Any]: 54 | if self.source is not None: 55 | main_kwargs = self.source() 56 | kwargs = self._update_recursive(main_kwargs, self.init_kwargs) 57 | 58 | # remove the source field if it is the default one 59 | if self.field_name == DEFAULT_SOURCE_FIELD: 60 | kwargs.pop(self.field_name) 61 | return kwargs 62 | return self.init_kwargs 63 | 64 | def __repr__(self) -> str: 65 | return f"DynamicFileSource(source={self.source!r})" 66 | 67 | 68 | def from_file( 69 | loader: Type[FileBaseSettingsSource], 70 | use_field: Optional[str] = None, 71 | required: bool = True, 72 | ): 73 | def decorator(cls): 74 | if not issubclass(cls, BaseModel): 75 | raise TypeError("@from_file can only be applied to Pydantic models") 76 | if use_field is not None: 77 | if use_field not in cls.model_fields: 78 | raise ValueError(f"Field {use_field} not found in model {cls.__name__}") 79 | field_annotation = cls.model_fields[use_field].annotation 80 | if not issubclass(field_annotation, (str, Path)): 81 | raise ValueError(f"Field {use_field} must be a string or Path to be used as file source") 82 | 83 | class DynamicSourceSettings(cls, BaseSettings): 84 | # required to eventually add a cli argument to the model 85 | # if cli_field is None, an additional argument will be added 86 | __arg_source_field__ = use_field 87 | __arg_source_required__ = required 88 | # model_config = ConfigDict(extra="ignore") 89 | 90 | @classmethod 91 | def settings_customise_sources( 92 | cls, 93 | settings_cls: Type[BaseSettings], 94 | init_settings: PydanticBaseSettingsSource, 95 | env_settings: PydanticBaseSettingsSource, 96 | dotenv_settings: PydanticBaseSettingsSource, 97 | file_secret_settings: PydanticBaseSettingsSource, 98 | ) -> Tuple[PydanticBaseSettingsSource, ...]: 99 | source = DynamicFileSource( 100 | settings_cls, 101 | loader, 102 | cast(InitSettingsSource, init_settings).init_kwargs, 103 | required, 104 | use_field, 105 | ) 106 | return (source,) 107 | 108 | return DynamicSourceSettings 109 | 110 | return decorator 111 | -------------------------------------------------------------------------------- /argdantic/sources/json.py: -------------------------------------------------------------------------------- 1 | 
from typing import Any, Dict, Tuple, Type 2 | 3 | from pydantic.fields import FieldInfo 4 | from pydantic_settings import BaseSettings, PydanticBaseSettingsSource 5 | 6 | from argdantic.sources.base import FileBaseSettingsSource, FileSettingsSourceBuilder 7 | 8 | 9 | class JsonFileLoader(FileBaseSettingsSource): 10 | """ 11 | Class internal to pydantic-settings that reads settings from a JSON file. 12 | This gets spawned by the JsonSettingsSource class. 13 | """ 14 | 15 | def get_field_value(self, field: FieldInfo, field_name: str) -> Tuple[Any, str, bool]: 16 | return None, field_name, False # pragma: no cover 17 | 18 | def __call__(self) -> Dict[str, Any]: 19 | try: 20 | import orjson as json 21 | 22 | return json.loads(self.path.read_bytes()) 23 | except ImportError: 24 | import json # type: ignore 25 | 26 | return json.load(self.path.open()) # type: ignore 27 | 28 | 29 | class JsonSettingsSource(FileSettingsSourceBuilder): 30 | """ 31 | A JSON file settings source reads settings from a JSON file. 32 | Orjson is used if available, otherwise the standard json module is used. 33 | """ 34 | 35 | def __call__(self, settings: Type[BaseSettings]) -> PydanticBaseSettingsSource: 36 | return JsonFileLoader(settings, self.path) 37 | 38 | def __repr__(self) -> str: 39 | return f"" 40 | -------------------------------------------------------------------------------- /argdantic/sources/toml.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, Tuple, Type 2 | 3 | from pydantic.fields import FieldInfo 4 | from pydantic_settings import BaseSettings, PydanticBaseSettingsSource 5 | 6 | from argdantic.sources.base import FileBaseSettingsSource, FileSettingsSourceBuilder 7 | 8 | 9 | class TomlFileLoader(FileBaseSettingsSource): 10 | """ 11 | Class internal to pydantic-settings that reads settings from a TOML file. 12 | This gets spawned by the TomlSettingsSource class. 13 | """ 14 | 15 | def get_field_value(self, field: FieldInfo, field_name: str) -> Tuple[Any, str, bool]: 16 | return None, field_name, False # pragma: no cover 17 | 18 | def __call__(self) -> Dict[str, Any]: 19 | try: 20 | import tomli 21 | except ImportError: 22 | raise ImportError( 23 | "You need to install TOML libraries to use the TOML source. " 24 | "You can do so by running `pip install argdantic[toml]`." 25 | ) 26 | with open(self.path, mode="rb") as f: 27 | return tomli.load(f) 28 | 29 | 30 | class TomlSettingsSource(FileSettingsSourceBuilder): 31 | """ 32 | A TOML file settings source reads settings from a TOML file. 33 | """ 34 | 35 | def __call__(self, settings: Type[BaseSettings]) -> PydanticBaseSettingsSource: 36 | return TomlFileLoader(settings, self.path) 37 | 38 | def __repr__(self) -> str: 39 | return f"" 40 | -------------------------------------------------------------------------------- /argdantic/sources/yaml.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, Tuple, Type 2 | 3 | from pydantic.fields import FieldInfo 4 | from pydantic_settings import BaseSettings, PydanticBaseSettingsSource 5 | 6 | from argdantic.sources.base import FileBaseSettingsSource, FileSettingsSourceBuilder 7 | 8 | 9 | class YamlFileLoader(FileBaseSettingsSource): 10 | """ 11 | Class internal to pydantic-settings that reads settings from a YAML file. 12 | This gets spawned by the YamlSettingsSource class. 
13 | """ 14 | 15 | def get_field_value(self, field: FieldInfo, field_name: str) -> Tuple[Any, str, bool]: 16 | return None, field_name, False # pragma: no cover 17 | 18 | def __call__(self) -> Dict[str, Any]: 19 | try: 20 | import yaml 21 | except ImportError: 22 | raise ImportError( 23 | "You need to install YAML dependencies to use the YAML source. " 24 | "You can do so by running `pip install argdantic[yaml]`." 25 | ) 26 | return yaml.safe_load(self.path.read_text()) 27 | 28 | 29 | class YamlSettingsSource(FileSettingsSourceBuilder): 30 | """ 31 | A YAML file settings source reads settings from a YAML file. 32 | If the PyYAML library is not installed, an error is raised. 33 | """ 34 | 35 | def __call__(self, settings: Type[BaseSettings]) -> PydanticBaseSettingsSource: 36 | return YamlFileLoader(settings, self.path) 37 | 38 | def __repr__(self) -> str: 39 | return f"" 40 | -------------------------------------------------------------------------------- /argdantic/stores/__init__.py: -------------------------------------------------------------------------------- 1 | from argdantic.stores.base import SettingsStoreCallable 2 | from argdantic.stores.json import JsonSettingsStore 3 | from argdantic.stores.toml import TomlSettingsStore 4 | from argdantic.stores.yaml import YamlSettingsStore 5 | 6 | __all__ = [ 7 | "JsonSettingsStore", 8 | "SettingsStoreCallable", 9 | "TomlSettingsStore", 10 | "YamlSettingsStore", 11 | ] 12 | -------------------------------------------------------------------------------- /argdantic/stores/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from pathlib import Path 3 | from typing import Callable, Literal, Optional, Set, Union 4 | 5 | from pydantic import BaseModel 6 | from pydantic_settings import BaseSettings 7 | 8 | SettingsStoreCallable = Callable[[Union[BaseSettings, BaseModel]], None] 9 | 10 | 11 | class BaseSettingsStore(ABC): 12 | """ 13 | A settings store is a callable object that takes an input file path 14 | and stores settings to it in a specific format, given by the implementation. 15 | """ 16 | 17 | def __init__( 18 | self, 19 | path: Union[str, Path], 20 | *, 21 | mode: Literal["python", "json"] = "python", 22 | encoding: str = "utf-8", 23 | include: Optional[Set[str]] = None, 24 | exclude: Optional[Set[str]] = None, 25 | by_alias: bool = False, 26 | exclude_unset: bool = False, 27 | exclude_defaults: bool = False, 28 | exclude_none: bool = False, 29 | ) -> None: 30 | self.path = Path(path) 31 | self.mode = mode 32 | self.encoding = encoding 33 | self.include = include 34 | self.exclude = exclude 35 | self.by_alias = by_alias 36 | self.exclude_unset = exclude_unset 37 | self.exclude_defaults = exclude_defaults 38 | self.exclude_none = exclude_none 39 | 40 | @abstractmethod 41 | def __call__(self, settings: BaseSettings) -> None: 42 | raise NotImplementedError # pragma: no cover 43 | -------------------------------------------------------------------------------- /argdantic/stores/json.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import Optional, Set, Union 3 | 4 | from pydantic_settings import BaseSettings 5 | 6 | from argdantic.stores.base import BaseSettingsStore 7 | 8 | 9 | class JsonSettingsStore(BaseSettingsStore): 10 | """ 11 | A JSON file settings store writes settings to a JSON file. 12 | Orjson is used if available, otherwise the standard json module is used. 
13 | """ 14 | 15 | def __init__( 16 | self, 17 | path: Union[str, Path], 18 | *, 19 | encoding: str = "utf-8", 20 | include: Optional[Set[str]] = None, 21 | exclude: Optional[Set[str]] = None, 22 | by_alias: bool = False, 23 | exclude_unset: bool = False, 24 | exclude_defaults: bool = False, 25 | exclude_none: bool = False, 26 | ) -> None: 27 | super().__init__( 28 | path, 29 | mode="json", 30 | encoding=encoding, 31 | include=include, 32 | exclude=exclude, 33 | by_alias=by_alias, 34 | exclude_unset=exclude_unset, 35 | exclude_defaults=exclude_defaults, 36 | exclude_none=exclude_none, 37 | ) 38 | 39 | def __call__(self, settings: BaseSettings) -> None: 40 | with self.path.open("wb") as f: 41 | text = settings.model_dump_json( 42 | include=self.include, 43 | exclude=self.exclude, 44 | by_alias=self.by_alias, 45 | exclude_defaults=self.exclude_defaults, 46 | exclude_unset=self.exclude_unset, 47 | exclude_none=self.exclude_none, 48 | ) 49 | f.write(text.encode(self.encoding)) 50 | 51 | def __repr__(self) -> str: 52 | return f"" 53 | -------------------------------------------------------------------------------- /argdantic/stores/toml.py: -------------------------------------------------------------------------------- 1 | from pydantic_settings import BaseSettings 2 | 3 | from argdantic.stores.base import BaseSettingsStore 4 | 5 | 6 | class TomlSettingsStore(BaseSettingsStore): 7 | """ 8 | A TOML file settings store writes settings to a TOML file. 9 | Tomli is used if available, otherwise the standard toml module is used. 10 | """ 11 | 12 | def __call__(self, settings: BaseSettings) -> None: 13 | try: 14 | import toml 15 | except ImportError: 16 | raise ImportError( 17 | "You need to install TOML dependencies to use the TOML source. " 18 | "You can do so by running `pip install argdantic[toml]`." 19 | ) 20 | 21 | with self.path.open("wb") as f: 22 | text = toml.dumps( 23 | settings.model_dump( 24 | mode=self.mode, 25 | include=self.include, 26 | exclude=self.exclude, 27 | by_alias=self.by_alias, 28 | exclude_defaults=self.exclude_defaults, 29 | exclude_unset=self.exclude_unset, 30 | exclude_none=self.exclude_none, 31 | ) 32 | ) 33 | f.write(text.encode(self.encoding)) 34 | 35 | def __repr__(self) -> str: 36 | return f"" 37 | -------------------------------------------------------------------------------- /argdantic/stores/yaml.py: -------------------------------------------------------------------------------- 1 | from pydantic_settings import BaseSettings 2 | 3 | from argdantic.stores.base import BaseSettingsStore 4 | 5 | 6 | class YamlSettingsStore(BaseSettingsStore): 7 | """ 8 | A YAML file settings store writes settings to a YAML file. 9 | PyYAML is used if available, otherwise the standard yaml module is used. 10 | """ 11 | 12 | def __call__(self, settings: BaseSettings) -> None: 13 | try: 14 | import yaml 15 | # exception actually tested, but coverage does not detect it 16 | except ImportError: # pragma: no cover 17 | raise ImportError( 18 | "You need to install YAML dependencies to use the YAML store. " 19 | "You can do so by running `pip install argdantic[yaml]`." 
20 | ) 21 | 22 | with self.path.open("w") as f: 23 | data = settings.model_dump( 24 | mode=self.mode, 25 | include=self.include, 26 | exclude=self.exclude, 27 | by_alias=self.by_alias, 28 | exclude_defaults=self.exclude_defaults, 29 | exclude_unset=self.exclude_unset, 30 | exclude_none=self.exclude_none, 31 | ) 32 | yaml.safe_dump(data, f, encoding="utf-8", allow_unicode=True) 33 | 34 | def __repr__(self) -> str: 35 | return f"" 36 | -------------------------------------------------------------------------------- /argdantic/testing.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from argparse import Namespace 3 | from typing import Any, Optional 4 | 5 | from argdantic import ArgParser 6 | 7 | 8 | class Result: 9 | def __init__( 10 | self, 11 | return_value: Any, 12 | exception: Optional[Exception], 13 | exc_info: Optional[Any], 14 | ) -> None: 15 | self.return_value = return_value 16 | self.exception = exception 17 | self.exc_info = exc_info 18 | 19 | 20 | class CLIRunner: 21 | def __init__(self, catch_exceptions: bool = True) -> None: 22 | self.catch_exceptions = catch_exceptions 23 | 24 | def invoke(self, cli: ArgParser, args: Optional[Namespace]) -> Any: 25 | exception = None 26 | exc_info = None 27 | result = None 28 | try: 29 | result = cli(args=args) 30 | # avoid early exit on help invocation 31 | except SystemExit: 32 | pass 33 | # avoid early exit on exceptions 34 | except Exception as e: 35 | if not self.catch_exceptions: 36 | raise e 37 | exception = e 38 | exc_info = sys.exc_info() 39 | return Result( 40 | return_value=result, 41 | exception=exception, 42 | exc_info=exc_info, 43 | ) 44 | -------------------------------------------------------------------------------- /argdantic/utils.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Container, Mapping, Optional, Sequence, Type, Union, get_args, get_origin 2 | 3 | from pydantic.v1.utils import lenient_issubclass 4 | 5 | 6 | def type_name(field_type: Type[Any]) -> str: 7 | """Returns the name of the type, or the name of the type's origin, if the type is a 8 | typing construct. 9 | Args: 10 | field_type (type): pydantic field type 11 | Returns: 12 | str: name of the type 13 | """ 14 | origin = get_origin(field_type) 15 | if origin is not None: 16 | name = origin.__name__ 17 | else: 18 | name = field_type.__name__ 19 | return name.lower() 20 | 21 | 22 | def is_multiple(field_type: Type[Any]) -> bool: 23 | """Checks whether the current type is a container type ('contains' other types), like 24 | lists and tuples. 25 | Args: 26 | field_type (type): pydantic field type 27 | # Returns: 28 | bool: true if a container, false otherwise 29 | """ 30 | # do not consider strings or byte arrays as containers 31 | if field_type in (str, bytes): 32 | return False 33 | # Early out for standard containers: list, tuple, range 34 | if lenient_issubclass(field_type, Sequence): 35 | return True 36 | origin = get_origin(field_type) 37 | # Early out for non-typing objects 38 | if origin is None: 39 | return False 40 | return lenient_issubclass(origin, Sequence) 41 | 42 | 43 | def is_mapping(field_type: Type[Any]) -> bool: 44 | """Checks whether this field represents a dictionary or JSON object. 45 | Args: 46 | field_type (type): pydantic type 47 | Returns: 48 | bool: true when the field is a dict-like object, false otherwise. 49 | """ 50 | # Early out for standard containers. 
51 | if lenient_issubclass(field_type, Mapping): 52 | return True 53 | # for everything else or when the typing is more complex, check its origin 54 | origin = get_origin(field_type) 55 | if origin is None: 56 | return False 57 | return lenient_issubclass(origin, Mapping) 58 | 59 | 60 | def is_container(field_type: Type[Any]) -> bool: 61 | """Checks whether the current type is a container type ('contains' other types), like 62 | lists and tuples. 63 | Args: 64 | field_type (type): pydantic field type 65 | Returns: 66 | bool: true if a container, false otherwise 67 | """ 68 | # do not consider strings or byte arrays as containers 69 | if field_type in (str, bytes): 70 | return False 71 | # Early out for standard containers: list, tuple, range 72 | if lenient_issubclass(field_type, Container): 73 | return True 74 | origin = get_origin(field_type) 75 | # Early out for non-typing objects 76 | if origin is not None: 77 | return lenient_issubclass(origin, Container) 78 | return False 79 | 80 | 81 | def is_typing(field_type: Type[Any]) -> bool: 82 | """Checks whether the current type is a module-like type. 83 | Args: 84 | field_type (type): pydantic field type 85 | Returns: 86 | bool: true if the type is itself a type 87 | """ 88 | raw = get_origin(field_type) 89 | if raw is None: 90 | return False 91 | return raw is type or raw is Type 92 | 93 | 94 | def is_optional(field_type: Optional[Type[Any]]) -> bool: 95 | """Checks whether the current type is an optional type. 96 | Args: 97 | field_type (type): pydantic field type 98 | Returns: 99 | bool: true if the type is optional, false otherwise 100 | """ 101 | return get_origin(field_type) is Union and type(None) in get_args(field_type) 102 | 103 | 104 | def get_optional_type(field_type: Optional[Type[Any]]) -> Type[Any]: 105 | """Returns the type of the optional field. 106 | Args: 107 | field_type (type): pydantic field type 108 | Returns: 109 | Type[Any]: the type of the field 110 | """ 111 | return get_args(field_type)[0] 112 | -------------------------------------------------------------------------------- /argdantic/version.py: -------------------------------------------------------------------------------- 1 | __version__ = "1.3.3" 2 | -------------------------------------------------------------------------------- /docs/guide/composition.md: -------------------------------------------------------------------------------- 1 | # Composition 2 | 3 | Exploiting both _pydantic_ and _argparse_ functionality, 4 | `argdantic` allows two types of composition: **nested models** and **nested parsers**. 5 | The first allows to define complex inputs, building a hierarchy of models and submodels. 6 | The second allows to organize your code into a series of commands and subcommands, each with its own set of arguments. 7 | 8 | ## Nested Models 9 | 10 | Strictly speaking, every argument stated in the signature of a `@command` function is wrapped into a _pydantic_ model. 11 | This allows two things: first, it makes it easier and more natural for the user to define input arguments, and second, 12 | it allows to define complex inputs, building a hierarchy of models and submodels, directly exploiting the powerful features of _pydantic_. 13 | 14 | For example, let's say we want to define a command that takes an input `Item`, which in turn contains an `Image` model. 
15 | We can do this by defining two models, and then using the `Item` model as an argument of the `@command` function: 16 | 17 | ```python title="nested_models.py" linenums="1" hl_lines="8-10 18" 18 | {!examples/composition/nested_models.py!} 19 | ``` 20 | 21 | Underneath, `argdantic` will automatically create the following structure: 22 | 23 | - A nameless root model, inheriting from `BaseConfig` if any extra feature is enabled, and containing: 24 | * A field `item`, of type `Item`, which defines: 25 | * A field `name`, of type `str`, 26 | * A field `description`, of type `str`, 27 | * A field `price`, of type `float`, 28 | * A field `tags`, of type `Set[str]`, 29 | * A field `image`, of type `Image`, which defines: 30 | * A field `url`, of type `str`, 31 | * A field `name`, of type `str` 32 | 33 | The resulting command line interface, with the help message, will be the following: 34 | 35 | ```console 36 | $ python nested_models.py --help 37 | > usage: nested_models.py [-h] --item.name TEXT --item.description TEXT --item.price FLOAT --item.tags TEXT [TEXT ...] --item.image.url TEXT --item.image.name TEXT 38 | > 39 | > optional arguments: 40 | > -h, --help show this help message and exit 41 | > --item.name TEXT (required) 42 | > --item.description TEXT 43 | > --item.price FLOAT (required) 44 | > --item.tags [TEXT [TEXT ...]] (default: set()) 45 | > --item.image.url TEXT (required) 46 | > --item.image.name TEXT (required) 47 | ``` 48 | 49 | !!! note 50 | 51 | This argument wrapping behaviour is automated by default to make the command definition as natural as possible, 52 | however it is possible to define a custom root model by using the `singleton` keyword argument 53 | of the `@command` decorator (see the [Singleton Configurations](#singleton-configurations) section below). 54 | 55 | Executing the command with the required arguments will result in the following output: 56 | 57 | ```console 58 | $ python nested_models.py --item.name "My Item" \ 59 | --item.description "My Item Description" \ 60 | --item.price 10.0 \ 61 | --item.tags "tag1" "tag2" \ 62 | --item.image.url "https://example.com/image.png" \ 63 | --item.image.name "My Image" 64 | > name='My Item' description='My Item Description' price=10.0 tags={'tag1', 'tag2'} image=Image(url='https://example.com/image.png' name='My Image') 65 | ``` 66 | 67 | !!! note 68 | 69 | Even though the `Image` model defaults to `None`, you will notice that its fields are still required. 70 | Strictly speaking, that's the correct behavior, since these fields are not optional. This would have also happened 71 | if the `image` field had an explicit `Image()` default value. 72 | 73 | This is a very simple example, but it shows how to define complex inputs, and how to exploit the power of _pydantic_ to define 74 | a hierarchy of models. In fact, you can define as many levels of nesting as you want, building a complex configuration 75 | that can be easily validated and parsed. 76 | Nested configurations are also supported using different input sources, such as environment variables and configuration files: 77 | see the [Input Sources](../sources) section for more details. 78 | 79 | ### Singleton Configurations 80 | 81 | Sometimes it may be useful to define a single configuration object manually, and then use it as the main 82 | input argument of a command. For instance, imagine a machine learning pipeline with a single `config` object 83 | that can be customized from the command line, passed to each step of the pipeline, and then dumped to a file for future reference. 
84 | 85 | This can be done by defining a custom model, and then by simply activating the `singleton` keyword argument of the `@command` decorator: 86 | 87 | ```python title="singleton_config.py" linenums="1" hl_lines="19" 88 | {!examples/composition/singleton.py!} 89 | ``` 90 | 91 | Argdantic will then use the defined argument as the root model, without wrapping it into a new one. This has the added 92 | benefit of removing the top-level name from the CLI fields, which would be all the same in this case. 93 | Note the absence of the `item` name in front of the following fields: 94 | 95 | ```console 96 | $ python singleton.py --help 97 | > usage: test.py [-h] --name TEXT --description TEXT --image.name TEXT 98 | > 99 | > optional arguments: 100 | > -h, --help show this help message and exit 101 | > --name TEXT (required) 102 | > --description TEXT (required) 103 | > --image.name TEXT (required) 104 | ``` 105 | 106 | !!! warning 107 | 108 | The `singleton` configuration setup only works when two requirements are met: first, **only one argument** must be 109 | defined in the signature of the `@command` function, and second, **that argument must be a _pydantic_ model**. 110 | Failure to meet these requirements will result in an `AssertionError` being raised. 111 | 112 | ## Nested Parsers 113 | 114 | `argdantic` also allows to organize your code into a series of commands and subcommands, each with its own set of arguments. 115 | A single parser is enough to define a list of commands at the same level. 116 | However, sometimes it is necessary to define a hierarchy of commands, such as `git commit` and `git push`. 117 | 118 | This can be done by defining multiple parsers, each with its own set of commands, and then merging them together, like so: 119 | 120 | ```python title="nested_parsers.py" linenums="1" hl_lines="3 4 31-33" 121 | {!examples/composition/nested_parsers.py!} 122 | ``` 123 | 124 | There are a few things to notice here: 125 | 126 | - The subparsers _must_ have a name, which is used to identify them when calling the CLI. 127 | This can be provided by either providing a `name` during instantiation, or by passing a `name` keyword argument to the `add_parser` method. 128 | 129 | - In general, the main parser _does not_ require a name, unless it is used as a subparser of another parser. 130 | 131 | When executing the `help` command, the following output will be produced: 132 | 133 | ```console 134 | $ python nested_parsers.py --help 135 | > usage: nested_parsers.py [-h] ... 136 | > 137 | > positional arguments: 138 | > 139 | > users 140 | > books 141 | > 142 | > optional arguments: 143 | > -h, --help show this help message and exit 144 | ``` 145 | 146 | !!! note 147 | 148 | The description provided by the `help` command is quite limited at the moment: as you can see, the name of the 149 | subparsers is shown, but not their description. This is a known limitation, and it will be addressed in the future. 150 | 151 | The same can be done on the subgroup, calling the `help` command on the `users` subparser: 152 | 153 | ```console 154 | $ python nested_parsers.py users --help 155 | > usage: nested_parsers.py users [-h] ... 156 | > 157 | > positional arguments: 158 | > 159 | > add-user Adds a single user. 160 | > delete-user 161 | > Deletes a user by name. 
162 | > 163 | > optional arguments: 164 | > -h, --help show this help message and exit 165 | ``` 166 | 167 | Finally, the `help` command can be called on the subcommand, showing the description and the arguments: 168 | 169 | ```console 170 | $ python nested_parsers.py users add-user --help 171 | > usage: nested_parsers.py users add-user [-h] --name TEXT --age INT 172 | > 173 | > optional arguments: 174 | > -h, --help show this help message and exit 175 | > --name TEXT (required) 176 | > --age INT (required) 177 | ``` 178 | 179 | Last but not least, the command can be executed, by passing the required arguments: 180 | 181 | ```console 182 | $ python nested_parsers.py users add-user --name "John Doe" --age 30 183 | > Adding user: John Doe (30) 184 | ``` 185 | 186 | Of course, nested models and nested parsers can be combined to create a complex hierarchy of commands and arguments. 187 | Imagination is the limit, well, at least until you run out of RAM. 188 | -------------------------------------------------------------------------------- /docs/guide/customization.md: -------------------------------------------------------------------------------- 1 | # Customization 2 | 3 | Argdantic supports a number of customization options, which can be used to change the default behavior of the CLI. 4 | Being based on `argparse` and `pydantic`, the customization options are limitless; however, the most common ones are listed below. 5 | 6 | - **Command names:** a custom name used to invoke the command, instead of the function name. 7 | - **Command description:** a custom help message describing the command, displayed when the `--help` flag is passed. 8 | - **Field descriptions:** a custom help message for each field, displayed when the `--help` flag is passed. 9 | - **Field aliases:** a list of optional names that can be used instead of the field name. 10 | - **Field default values:** a default value to be used when the field is not provided. 11 | 12 | The following sections will provide a brief overview of these options, and how to use them. 13 | 14 | ## Command Names 15 | 16 | By default, the name of the function that is decorated with `@parser.command()` is used as the name of the command. 17 | This can be changed by passing a `name` argument to the decorator: 18 | 19 | ```python title="main.py" linenums="1" hl_lines="6" 20 | {!examples/customization/custom_name.py!} 21 | ``` 22 | 23 | When executed, the script will provide the following output: 24 | 25 | ```console 26 | $ python main.py --help 27 | > usage: main.py [-h] --name TEXT 28 | > 29 | > optional arguments: 30 | > -h, --help show this help message and exit 31 | > --name TEXT (required) 32 | ``` 33 | 34 | **Wait a minute, where is the custom name?** 35 | 36 | By default, with only one command, the name of the command is not displayed, nor used to execute the command. 37 | This can be changed by: 38 | 39 | 1. Registering more than one command, the easiest option. 40 | 2. Passing the `force_group` argument to the parser. 41 | 42 | ### Multiple Commands 43 | 44 | When more than one command is registered, each command's name is required to execute that specific CLI function. 45 | For instance, the following script: 46 | 47 | ```python title="main.py" linenums="1" hl_lines="6 14" 48 | {!examples/customization/custom_name_multi.py!} 49 | ``` 50 | 51 | When executed, the script will provide the following output: 52 | 53 | ```console 54 | $ python main.py --help 55 | > usage: main.py [-h] ... 56 | > 57 | > positional arguments: 58 | > 59 | > hi Say hello. 60 | > bye Say goodbye. 
61 | > 62 | > optional arguments: 63 | > -h, --help show this help message and exit 64 | ``` 65 | 66 | The `hi` and `bye` commands are now available, and can be executed by passing their name as the first argument: 67 | 68 | ```console 69 | $ python main.py hi --name John 70 | > Hello, John! 71 | ``` 72 | 73 | You also probably noticed that the commands also provide a description. 74 | This can be customized in many ways, and will be covered in the next section. 75 | 76 | ### Forced Groups 77 | 78 | The `force_group` argument can be used to force the creation of a group, even if only one command is registered. 79 | This can be useful if you want to force users to provide the command name upon execution. 80 | 81 | For instance, the following script: 82 | 83 | ```python title="main.py" linenums="1" hl_lines="6 14" 84 | {!examples/customization/custom_name_grouped.py!} 85 | ``` 86 | 87 | When executed, the script will provide the following output: 88 | 89 | ```console 90 | $ python main.py --help 91 | > usage: main.py [-h] ... 92 | > 93 | > positional arguments: 94 | > 95 | > greetings 96 | > Say hello. 97 | > 98 | > optional arguments: 99 | > -h, --help show this help message and exit 100 | ``` 101 | 102 | The `greetings` command is now available, and can be executed by passing its name as the first argument: 103 | 104 | ```console 105 | $ python main.py greetings --name John 106 | > Hello, John! 107 | ``` 108 | 109 | ## Command Descriptions 110 | 111 | You may have noticed from the previous example that the commands also provide a description. 112 | Descriptions can be customized in two simple ways: 113 | 114 | - Automatically, by simply providing a docstring to the function. 115 | 116 | - Manually passing a `help` argument to the `@parser.command()` decorator. 117 | 118 | For instance, the following script: 119 | 120 | ```python title="main.py" linenums="1" hl_lines="6" 121 | {!examples/customization/description.py!} 122 | ``` 123 | Is equivalent to: 124 | 125 | ```python title="main.py" linenums="1" hl_lines="8" 126 | {!examples/customization/description_docs.py!} 127 | ``` 128 | 129 | When executed, the scripts will provide the following output: 130 | 131 | ```console 132 | $ python main.py --help 133 | > usage: main.py [-h] ... 134 | > 135 | > positional arguments: 136 | > 137 | > hello Print a greeting message. 138 | > 139 | > optional arguments: 140 | > -h, --help show this help message and exit 141 | ``` 142 | 143 | ## Default Values 144 | 145 | Of course, any good CLI tool should provide the user with a way to provide default values for the fields. 146 | Given that defining a command is as simple as defining a function, introducing default values can also be as simple as 147 | providing a default value to the function arguments. For instance: 148 | 149 | ```python title="main.py" linenums="1" hl_lines="7" 150 | {!examples/customization/default_values.py!} 151 | ``` 152 | 153 | This can be executed with no arguments without any issues: 154 | 155 | ```console 156 | $ python main.py 157 | > Hello, World! 158 | > You are 42 years old. 
159 | ``` 160 | 161 | The default values are also provided in the help message, so that the user is informed about them: 162 | 163 | ```console 164 | $ python main.py --help 165 | > usage: main.py [-h] [--name TEXT] [--age INT] 166 | > 167 | > optional arguments: 168 | > -h, --help show this help message and exit 169 | > --name TEXT (default: World) 170 | > --age INT (default: 42) 171 | ``` 172 | 173 | ### Default Values and Required Fields 174 | 175 | Of course, if a field provides a default value, it is no longer required. 176 | This implies that every field must be assigned in some way, either by providing it beforehand or during execution, 177 | which will respectively add a `default` or `required` flag to the help message. 178 | But, as a famous grand master once said, _there is another_: when the default is `None`, the field is neither 179 | marked as `default` nor `required`, so the help message will not contain any flag. 180 | At the time of writing, this is the only way to provide a true optional field. 181 | 182 | ```python title="main.py" linenums="1" hl_lines="7" 183 | {!examples/customization/default_values_none.py!} 184 | ``` 185 | 186 | The help message will now look like this: 187 | 188 | ```console 189 | $ python main.py --help 190 | > usage: main.py [-h] [--name TEXT] [--age INT] 191 | > 192 | > optional arguments: 193 | > -h, --help show this help message and exit 194 | > --name TEXT 195 | > --age INT 196 | ``` 197 | 198 | These are very simple examples, but they can be extended to any field, including 199 | more complex ones such as `List`, `Dict`, and so on. 200 | There are also a few other ways to provide default values, which will be covered in the next sections. 201 | 202 | ## Field Options 203 | 204 | A CLI cannot be complete without a way to customize the fields. 205 | `argdantic` provides a way to customize the fields through the `ArgField` function, which can be seen 206 | as a light wrapper around _pydantic_'s `Field` with a few changes to make it more suitable for CLI tools. 207 | `ArgField`, on top of the arguments provided by `Field`, provides the following options: 208 | 209 | - `names`: A variable list of names to provide aliases for the field. This substitutes the positional default value in _pydantic_. 210 | 211 | - `default`: A keyword argument to provide a default value for the field. This is now a _keyword_ argument, mirroring `argparse`. 212 | 213 | - `description`: A keyword argument to provide a description for the field. This uses the same functionality of `Field`'s `description`. 214 | 215 | ### Aliases 216 | A common functionality provided by _argparse_ is the ability to provide aliases for the fields. 217 | This option is made available in `argdantic` by using the `ArgField` modifier to the field. 218 | Optional field names can be provided in the following way: 219 | 220 | ```python title="main.py" linenums="1" hl_lines="7" 221 | {!examples/customization/aliases.py!} 222 | ``` 223 | 224 | Executing the script with the `--help` flag will provide the following output: 225 | 226 | ```console 227 | $ python main.py --help 228 | > usage: main.py [-h] --name TEXT --age INT 229 | > 230 | > optional arguments: 231 | > -h, --help show this help message and exit 232 | > --name TEXT, -n TEXT (required) 233 | > --age INT, -a INT (required) 234 | ``` 235 | 236 | The message now shows to the user that the `--name` and `--n` flags are equivalent, as well as the `--age` and `--a` flags. 
237 | Let's try to execute the script with the new flags: 238 | 239 | ```console 240 | $ python main.py -n John -a 42 241 | > Hello, John! 242 | > You are 42 years old. 243 | ``` 244 | 245 | ### Default with Fields 246 | 247 | Substituting the default value in the function signature with the `ArgField` modifier does not preclude the use 248 | of the default value in the function signature. This is now possible by using the `default` keyword argument: 249 | 250 | ```python title="main.py" linenums="1" hl_lines="8 9" 251 | {!examples/customization/default_with_fields.py!} 252 | ``` 253 | 254 | As before, the script can be executed without any arguments: 255 | 256 | ```console 257 | $ python main.py 258 | > Hello, World! 259 | > You are 42 years old. 260 | ``` 261 | 262 | ### Descriptions 263 | 264 | Last but not least, CLI arguments are usually accompanied by a description. 265 | This can be provided in the same way as the default value, by using the `description` keyword argument: 266 | 267 | ```python title="main.py" linenums="1" hl_lines="8 9" 268 | {!examples/customization/description_fields.py!} 269 | ``` 270 | 271 | This description will be displayed in the help message: 272 | 273 | ```console 274 | $ python main.py --help 275 | > usage: main.py [-h] [--name TEXT] [--age INT] 276 | > 277 | > optional arguments: 278 | > -h, --help show this help message and exit 279 | > --name TEXT, -n TEXT your name (default: John) 280 | > --age INT, -a INT your age (default: 30) 281 | ``` 282 | 283 | ### Other Options 284 | 285 | There are a few other options that can be provided to the `ArgField` modifier, which are not covered in this tutorial, 286 | but can be found in the `pydantic` documentation. These include, just to name a few, validators, constraints, and so on. 287 | In general, every other argument provided to `Field` can be provided to `ArgField` in the same way. 288 | -------------------------------------------------------------------------------- /docs/guide/intro.md: -------------------------------------------------------------------------------- 1 | # Introduction 2 | 3 | ## Parsers 4 | 5 | The main building block of *argdantic* is represented by a `ArgParser` instance. 6 | Every CLI requires at least one active parser, which serves as main entry point. 7 | A parser simply acts as a collection of commands, which are only executed upon call. 8 | 9 | Any parser must first be imported, instantiated, then called in a main, like so: 10 | 11 | ```python title="main.py" linenums="1" hl_lines="1 3 7" 12 | {!examples/basic/empty.py!} 13 | ``` 14 | However, **this code is not enough to have a working CLI**. If you attempt to run it you will obtain: 15 | ```console 16 | $ python main.py 17 | > AssertionError: Parser must have at least one command or group of commands 18 | ``` 19 | This is the expected behavior, as a parser without any command is useless: check the [Commands](#commands) section for more information. 20 | 21 | ## Commands 22 | 23 | Commands are the backbone of any parser. Underneath, they are simply functions that are called when requested by the user. 24 | A command can be added to a parser by using the `@parser.command()` decorator, like so: 25 | 26 | ```python title="main.py" linenums="1" hl_lines="6" 27 | {!examples/basic/empty_cmd.py!} 28 | ``` 29 | 30 | When executed, the script will provide the following output: 31 | ```console 32 | $ python main.py 33 | > Hello World! 34 | ``` 35 | This is a step forward, however the command is still not very useful. 
Let's see how to add arguments to it. 36 | 37 | ## Arguments 38 | 39 | Arguments are the way to provide information and dynamic functionality to a command. 40 | They are defined by simply adding them to the command's signature, like so: 41 | 42 | ```python title="main.py" linenums="1" hl_lines="7" 43 | {!examples/basic/simple.py!} 44 | ``` 45 | 46 | !!! note 47 | Of course, typing is crucial to ensure that `argdantic` can correctly parse the arguments. 48 | The framework however will be kind enough to provide an error message if a field does not provide a type annotation. 49 | 50 | 51 | When executed, the script will provide the following output: 52 | ```console 53 | $ python main.py 54 | > usage: main.py [-h] --name TEXT 55 | > main.py: error: the following arguments are required: --name 56 | ``` 57 | This is correct, as the `--name` argument is required. Let's see how to provide it. 58 | 59 | ```console 60 | $ python main.py --name John 61 | > Hello, John! 62 | ``` 63 | 64 | ## Help Messages 65 | 66 | Of course, randomly executing a command without any information is not very useful. 67 | The `--help` argument is automatically added to every command, and provides a summary of the command's arguments. 68 | For instance, running the help command on the previous example will provide the following output: 69 | 70 | ```console 71 | $ python main.py --help 72 | > usage: main.py [-h] --name TEXT 73 | > 74 | > optional arguments: 75 | > -h, --help show this help message and exit 76 | > --name TEXT (required) 77 | ``` 78 | 79 | You may have noticed two things: if you are familiar with `argparse`, you probably already know that 80 | the `--help` argument is automatically added to every command. In addition, `argdantic` explicitly 81 | provides the `(required)` tag to every argument that does not specify a default value. 82 | This is done to ensure that the user is aware of some missing options, even before the command is executed. 83 | -------------------------------------------------------------------------------- /docs/guide/sources.md: -------------------------------------------------------------------------------- 1 | # CLI Sources 2 | 3 | `argdantic` allows you to define the arguments of your CLI in a variety of ways, including: 4 | 5 | - Command line arguments, using `argparse` 6 | 7 | - Environment variables or `.env` files, using [python-dotenv](https://github.com/theskumar/python-dotenv) 8 | 9 | - Configuration files, using [JSON](https://www.json.org/json-en.html), [YAML](https://yaml.org/), or [TOML](https://toml.io/en/) files. 10 | 11 | Each of these input sources can be used independently, or in combination with each other. 12 | The priority of the input sources is given by the order in which they are defined, with the last one having the highest priority. 13 | Of course, the command line arguments always have the highest priority, and they can be used to override any other input source. 14 | 15 | Since every command is virtually independent, **sources are part of the command definition**. 16 | This means that you can define different sources for different commands or models in the same CLI. 17 | 18 | ## Static Sources 19 | 20 | The simplest kind of source is a static source, where the values are defined at the time of the command definition. 
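Before the full example, here is a minimal, hypothetical sketch of what attaching static sources to a command might look like; the command, its fields, and the file names are purely illustrative, and the `sources` keyword argument of the `@parser.command()` decorator is assumed here rather than documented on this page:

```python
from argdantic import ArgParser
from argdantic.sources import EnvSettingsSource, YamlSettingsSource

parser = ArgParser()


# Hypothetical command: values may come from environment variables, from a YAML
# file, or from the command line. The last source listed takes precedence, and
# explicit CLI arguments override every file or environment source.
@parser.command(sources=[
    EnvSettingsSource(env_file=".env", env_prefix="APP_"),
    YamlSettingsSource("settings.yaml"),
])
def create(name: str = "default", price: float = 1.0):
    print(name, price)


if __name__ == "__main__":
    parser()
```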
21 | 22 | For instance, the following example defines a single command with many different sources: 23 | 24 | ```python title="sources.py" linenums="1" hl_lines="6-11 31-36" 25 | {!examples/sources/single_cmd.py!} 26 | ``` 27 | 28 | If you try to run the command as it is, you will get an error because the JSON and TOML files are not defined. 29 | Comment out the lines that define the JSON and TOML sources, and run the command again. 30 | You will see that the command runs successfully, and the arguments are taken from the YAML file: 31 | 32 | ```bash 33 | $ python sources.py 34 | > name='example' description='Example item' price=2.3 tags={'example', 'item', 'tag'} image=Image(url='https://example.com/image.jpg', name='example.jpg') 35 | ``` 36 | 37 | !!! warning 38 | 39 | Support for sources is still experimental, and the API may change in the future. 40 | The `required` flags are currently a limitation for file sources, as they force users 41 | to define CLI arguments that may be set via file. 42 | **Use default values or `None` as a workaround.** 43 | 44 | ## Dynamic Sources 45 | 46 | Reading or writing a full configuration from scratch may not be your cup of tea. 47 | Sometimes you may want to define a model with its own fields, reading its configuration 48 | from a file, while still being able to override some of its fields from the command line. 49 | 50 | Imagine you have a model like this: 51 | 52 | ```python title="models.py" linenums="1" 53 | from pydantic import BaseModel 54 | 55 | class Fruit(BaseModel): 56 | name: str 57 | color: str 58 | price: float 59 | ``` 60 | 61 | The CLI may define a `--fruit` argument to point to a file with the content of a `Fruit` instance, as well as a `--fruit.name` argument, or `--fruit.color` argument, etc. 62 | 63 | In argdantic, you can do that with the `from_file` annotation. 
64 | 65 | ```python title="dynamic.py" linenums="1" hl_lines="4 7 14" 66 | {!examples/sources/dynamic.py!} 67 | ``` 68 | 69 | Without additional configuration, the `from_file` decorator automatically adds an extra argument, named after the field, to the command line interface, in this case `--dataset` and `--optim`. 70 | 71 | This enables two extra arguments, namely `--dataset` and `--optim`: 72 | 73 | ```diff 74 | $ python dynamic.py --help 75 | usage: models.py [-h] [--dataset.name TEXT] [--dataset.batch-size INT] [--dataset.tile-size INT] [--dataset.shuffle | --no-dataset.shuffle] --dataset PATH 76 | [--optim.name TEXT] [--optim.learning-rate FLOAT] [--optim.momentum FLOAT] --optim PATH 77 | 78 | options: 79 | -h, --help show this help message and exit 80 | --dataset.name TEXT (default: CIFAR10) 81 | --dataset.batch-size INT 82 | (default: 32) 83 | --dataset.tile-size INT 84 | (default: 256) 85 | --dataset.shuffle (default: True) 86 | --no-dataset.shuffle 87 | + --dataset PATH (required) 88 | --optim.name TEXT (default: SGD) 89 | --optim.learning-rate FLOAT 90 | (default: 0.01) 91 | --optim.momentum FLOAT 92 | (default: 0.9) 93 | + --optim PATH (required) 94 | ``` 95 | 96 | Invoking the command with the `--dataset` and `--optim` arguments will read the configuration from the files, which are defined as follows: 97 | 98 | ```yaml title="resources/dataset.yml" 99 | {!examples/sources/resources/dataset.yml!} 100 | ``` 101 | 102 | ```yaml title="resources/optim.yml" 103 | {!examples/sources/resources/optim.yml!} 104 | ``` 105 | 106 | ```console 107 | $ python dynamic.py --dataset resources/dataset.yml --optim resources/optim.yml 108 | name='coco' batch_size=32 tile_size=512 shuffle=True 109 | name='adam' learning_rate=0.001 momentum=0.9 110 | ``` 111 | 112 | ### Customizing the `from_file` behavior 113 | 114 | The `from_file` decorator has a few options that can be used to customize its behavior: 115 | 116 | - `required`: If `True`, the file path is required. If `False`, the file path is optional. Defaults to `True`. 117 | 118 | - `loader`: A settings source class that, given the model class and the file path, loads the configuration values for the model. `argdantic` provides three built-in loaders: 119 | - `JsonFileLoader` 120 | - `YamlFileLoader` 121 | - `TomlFileLoader` 122 | 123 | - `use_field`: When specified, the model field indicated by the string will be used as the file path to look for the configuration. 124 | In this case, the extra argument will not be added to the command line interface, and the file path is naturally provided by the pydantic model itself. It may be useful when the file path is needed later on. 125 | 126 | Here's an example providing both the `required` and `use_field` options: 127 | 128 | ```python title="dynamic_custom.py" linenums="1" hl_lines="6 9 17" 129 | {!examples/sources/dynamic_custom.py!} 130 | ``` 131 | 132 | Running the following command will read the configuration from a file for the `optim` instance only: 133 | 134 | ```diff 135 | +$ python dynamic_custom.py --optim.path resources/optim.yml 136 | name='CIFAR10' batch_size=32 tile_size=256 shuffle=True 137 | path=PosixPath('resources/optim.yml') name='adam' learning_rate=0.001 momentum=0.9 138 | ``` 139 | 140 | Notice that this time the path is provided using a standard field, and the loader automatically reads the configuration from the specified file. 
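To make the `use_field` behavior concrete, here is a minimal, hypothetical sketch (the model, its fields, and the default path are illustrative, not taken from `dynamic_custom.py`): the `path` field is exposed as a regular CLI argument, and it also tells the YAML loader which file to read before the remaining fields are validated.

```python
from pathlib import Path

from pydantic import BaseModel

from argdantic import ArgParser
from argdantic.sources import YamlFileLoader, from_file

parser = ArgParser()


# Hypothetical model: "path" is a normal pydantic field, but use_field="path"
# also makes it the location of the YAML file that populates the other fields.
@from_file(YamlFileLoader, use_field="path", required=False)
class Optimizer(BaseModel):
    path: Path = Path("resources/optim.yml")
    name: str = "SGD"
    learning_rate: float = 0.01


@parser.command()
def train(optim: Optimizer):
    print(optim)


if __name__ == "__main__":
    parser()
```

Passing `--optim.path` on the command line should point the loader at a different file, while any other explicitly provided `--optim.*` argument is expected to override the values read from it.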
141 | -------------------------------------------------------------------------------- /docs/guide/types.md: -------------------------------------------------------------------------------- 1 | # Field Types 2 | 3 | Thanks to features provided by _pydantic_'s data definitions, `argdantic` supports a large number of field types, 4 | starting from the standard library up to JSON inputs. 5 | 6 | ## Primitive types 7 | 8 | Considering primitive, non-complex data types, the library supports the following: 9 | 10 | - `str`: values accepted as is, parsed as simple text without further processing. 11 | 12 | - `int`: tries to convert any given input into an integer through `int(value)`. 13 | 14 | - `float`: similarly, tries to convert any given input into a floating point number through `float(value)`. 15 | 16 | - `bytes`: similar to strings, however in this case the underlying representation remains in bytes. 17 | 18 | - `bool`: by default, booleans are intended as flag options. In this case, any boolean field will have two corresponding CLI flags `--field/--no-field`. 19 | 20 | The following example shows a brief overview of the primitive types: 21 | 22 | ```python title="primitives.py" linenums="1" 23 | {!examples/typing/primitive.py!} 24 | ``` 25 | 26 | With the following help message: 27 | ```console 28 | $ python primitives.py --help 29 | > usage: primitives.py [-h] --name TEXT --age INT --weight FLOAT --data BYTES (--flag | --no-flag) 30 | > 31 | > optional arguments: 32 | > -h, --help show this help message and exit 33 | > --name TEXT (required) 34 | > --age INT (required) 35 | > --weight FLOAT (required) 36 | > --data BYTES (required) 37 | > --flag 38 | > --no-flag 39 | ``` 40 | 41 | !!! note 42 | 43 | Observe that the `--flag` and `--no-flag` options are not marked as required. 44 | That is the expected behaviour: strictly speaking, _taken individually_, they are not required. 45 | However, being mutually exclusive, one of either `--flag` or `--no-flag` is still needed. 46 | 47 | `argdantic` takes care of converting the provided fields into _argparse_ arguments, so that the automatically generated description remains as faithful as possible. 48 | Bear in mind that types are exploited only for documentation purposes; the final type checking will be carried out by _pydantic_. 49 | Most complex types are often interpreted as strings, unless specified otherwise. 50 | 51 | ## Complex types 52 | 53 | Thanks to the powerful data definitions provided by _pydantic_, `argdantic` supports a large number of complex types. 54 | Currently, the following types have been tested and supported: 55 | 56 | ### Standard Library types 57 | 58 | Generally speaking, non-typed complex types will default to strings unless specified otherwise. 59 | 60 | - `list`: without specifying the internal type, _list_ fields will behave as _multiple_ options of string items. 61 | Internally, _argdantic_ exploits _argparse_'s `nargs` option to handle sequences. 62 | In this case, the argument can be repeated multiple times to build a list. 63 | For instance, `python cli.py --add 1 2` will result in a list `[1, 2]`. 64 | 65 | - `tuple`: similar to lists, this will behave as an unbounded sequence of strings, with multiple parameters. 66 | 67 | - `dict`: dictionaries are interpreted as JSON strings. In this case, there will be no further validation. 68 | Given that valid JSON strings require double quotes, arguments provided through the command line must use single-quoted strings. 
69 | For instance, `python cli.py --extras '{"items": 12}'` will be successfully parsed, while `python cli.py --extras "{'items': 12}"` will not.
70 | 
71 | - `set`: again, from a command line point of view, sets are a simple list of values. In this case, repeated values will be excluded.
72 | For instance, `python cli.py --add a --add b --add a` will result in a set `{'a', 'b'}`.
73 | 
74 | - `frozenset`: _frozen_ sets adopt the same behavior as normal sets, with the only difference that they remain immutable.
75 | 
76 | - `deque`: similarly, _deques_ act as sequences from a CLI standpoint, while being treated as double-ended queues in code.
77 | 
78 | - `range`: ranges are interpreted as a sequence of integers, with the same behavior as lists and tuples.
79 | 
80 | ### Typing Containers
81 | 
82 | - `Any`: For obvious reasons, _Any_ fields will behave as `str` options without further processing.
83 | 
84 | - `Optional`: optional typing can be interpreted as _syntactic sugar_, meaning it will not have any effect on the underlying
85 | validation, but it provides an explicit declaration that the field can also accept `None` as a value.
86 | 
87 | - `List`: Similar to standard lists, typing _Lists_ behave as sequences of items. In this case, however, the inner type is
88 | exploited to provide further validation through _pydantic_.
89 | For instance, `python cli.py --add a --add b` will result in a validation error for a list of integers `List[int]`.
90 | 
91 | - `Tuple`: typing _Tuples_ can behave in two ways: when using a _variable length_ structure (i.e., `Tuple[int]` or `Tuple[int, ...]`),
92 | tuples act as a sequence of typed items, validated through _pydantic_, where the parameter is specified multiple times.
93 | When using a _fixed length_ structure (i.e., `Tuple[int, int]` or similar), they are considered as fixed `nargs` options,
94 | where the parameter is specified once, followed by the sequence of values separated by whitespaces.
95 | For instance, `python cli.py --items a b c` will result in a tuple `('a', 'b', 'c')`.
96 | If the `items` tuple expected only two items, the command would result in a validation error.
97 | 
98 | - `Dict`: Similar to the standard `dict` field, typing dictionaries require a JSON string as input. However, inner types
99 | allow for a finer validation: for instance, considering a `metrics: Dict[str, float]` field, `--metrics '{"f1": 0.93}'` is accepted,
100 | while `--metrics '{"auc": "a"}'` is not a valid input.
101 | 
102 | - `Deque`: with the same reasoning as typed lists and tuples, _Deques_ will act as sequences with a specific type.
103 | 
104 | - `Set`: As you guessed, typed sets act as multiple options where repeated items are excluded, with additional type validation
105 | on the items themselves.
106 | 
107 | - `FrozenSet`: as with _Sets_, but they represent immutable structures after parsing.
108 | 
109 | - `Sequence` and `Iterable`: with no surprise, sequences and iterables act as sequences, nothing much to add here.
110 | 
111 | !!! warning
112 | 
113 | For obvious reasons, `Union` typings are not supported at this time.
114 | Parsing a parameter that may assume multiple types is more of a philosophical problem than a technical one.
115 | Future releases will consider adding support for this typing.
116 | 
117 | 
118 | The code below provides a relatively comprehensive view of most container types supported through `argdantic`.
119 | 
120 | 
121 | ```python title="containers.py" linenums="1"
122 | {!examples/typing/containers.py!}
123 | ```
124 | 
125 | Executing this script with the `--help` flag will provide the description for the current configuration.
126 | Also, defaults are allowed and validated.
127 | 
128 | ```console
129 | $ python containers.py --help
130 | > usage: containers.py [-h] --simple-list TEXT [TEXT ...] --list-of-ints INT [INT ...]
131 | > --simple-tuple TEXT [TEXT ...] --multi-typed-tuple INT FLOAT TEXT BOOL --simple-dict JSON
132 | > --dict-str-float JSON --simple-set TEXT [TEXT ...] --set-bytes BYTES [BYTES ...]
133 | > --frozen-set INT [INT ...] --none-or-str TEXT --sequence-of-ints INT [INT ...]
134 | > --compound JSON --deque INT [INT ...]
135 | >
136 | > optional arguments:
137 | > -h, --help show this help message and exit
138 | > --simple-list TEXT [TEXT ...] (required)
139 | > --list-of-ints INT [INT ...] (required)
140 | > --simple-tuple TEXT [TEXT ...] (required)
141 | > --multi-typed-tuple INT FLOAT TEXT BOOL (required)
142 | > --simple-dict JSON (required)
143 | > --dict-str-float JSON (required)
144 | > --simple-set TEXT [TEXT ...] (required)
145 | > --set-bytes BYTES [BYTES ...] (required)
146 | > --frozen-set INT [INT ...] (required)
147 | > --none-or-str TEXT (required)
148 | > --sequence-of-ints INT [INT ...] (required)
149 | > --compound JSON (required)
150 | > --deque INT [INT ...] (required)
151 | ```
152 | 
153 | 
154 | ### Literals and Enums
155 | 
156 | Sometimes it may be useful to directly limit the choices of certain fields,
157 | by letting the user select among a fixed list of values.
158 | In this case, `argdantic` provides this feature using _pydantic_'s support for `Enum` and `Literal` types,
159 | parsed from the command line through the `choices` argument option.
160 | 
161 | While _Enums_ represent the standard way to provide choice-based options, _Literals_ can be seen as a lightweight enumeration.
162 | In general, the latter are simpler and easier to handle than the former for most use cases.
163 | _Enums_, on the other hand, provide both a `name` and a `value` component, where only the former is exploited for the parameter definition.
164 | The latter can represent any kind of object, therefore making _enums_ more suitable for more complex use cases.
165 | 
166 | The following script presents a sample of possible choice definitions in _argdantic_:
167 | ```python title="choices.py" linenums="1"
168 | {!examples/typing/choices.py!}
169 | ```
170 | 
171 | !!! warning
172 | 
173 | As you probably noticed, the string enumeration only subclasses `Enum`.
174 | Strictly speaking, `ToolEnum(str, Enum)` would be a better inheritance definition; however, this breaks the type
175 | inference by providing two origins.
176 | 
177 | Currently, there are two solutions:
178 | 
179 | - **simply use Enum**: it should be fine in most cases.
180 | - **use StrEnum**, which however is only available since Python 3.11 (see the sketch below).
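As a minimal sketch, assuming Python 3.11 or newer, the `StrEnum` alternative mentioned above could look like the following; members behave as plain strings, sidestepping the dual-origin issue of subclassing both `str` and `Enum`:

```python
from enum import StrEnum  # available since Python 3.11


class ToolEnum(StrEnum):
    # Each member is itself a string, so no extra base class is needed.
    hammer = "Hammer"
    screwdriver = "Screwdriver"
```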
181 | 
182 | Launching the help for this script will result in the following output:
183 | ```console
184 | $ python choices.py --help
185 | > usage: choices.py [-h] [--a [one|two]] [--b [1|2]] [--c [True|False]] [--d [hammer|screwdriver]] [--e [ok|not_found|internal_error]]
186 | >
187 | > optional arguments:
188 | > -h, --help show this help message and exit
189 | > --a [one|two] (default: two)
190 | > --b [1|2] (default: 2)
191 | > --c [True|False] (default: True)
192 | > --d [hammer|screwdriver] (default: ToolEnum.hammer)
193 | > --e [ok|not_found|internal_error] (default: HTTPEnum.not_found)
194 | ```
195 | 
196 | You can notice that, even without an explicit description,
197 | choice-based fields will automatically provide the list of possible values.
198 | Defaults also behave as expected: both literals and enums will accept any of the allowed values as default, and in that
199 | case the selected item will be displayed as _default_ in the console.
200 | Again, note that the CLI exploits the `name` field in enum-based arguments for readability, not its actual value.
201 | 
202 | Calling the script with a wrong choice will result in an error message, displaying the list of allowed values:
203 | ```console
204 | $ python choices.py --a three
205 | > usage: choices.py [-h] [--a [one|two]] [--b [1|2]] [--c [True|False]] [--d [hammer|screwdriver]] [--e [ok|not_found|internal_error]]
206 | > choices.py: error: argument --a: invalid choice: three (choose from [one|two])
207 | ```
208 | 
209 | ### Module types
210 | 
211 | !!! note
212 | 
213 | Coming soon!
214 | -------------------------------------------------------------------------------- /docs/index.md: --------------------------------------------------------------------------------
1 | # Argdantic
2 | 
3 | *Typed command line interfaces, powered by [argparse](https://docs.python.org/3/library/argparse.html) and [pydantic](https://github.com/pydantic/pydantic).*
4 | 
5 | 
6 | ## Features
7 | 
8 | `argdantic` provides a thin boilerplate layer that delivers a modern CLI experience, including:
9 | 
10 | - **Typed arguments:** arguments require full typing by default, enforcing clarity and helping your editor provide better support (linting, hinting).
11 | 
12 | - **Nested models:** exploit `pydantic` models to scale from simple primitives to complex nested configurations with little effort.
13 | 
14 | - **Nested commands:** combine commands and group them into hierarchies to build complex interfaces.
15 | 
16 | - **Validation by default:** thanks to `pydantic`, field validation is provided by default, with the desired complexity.
17 | 
18 | - **Multiple sources:** arguments can be provided from multiple sources, including environment variables, JSON, TOML and YAML files.
19 | 
20 | ## Quickstart
21 | 
22 | ### Installation
23 | Installing `argdantic` can be done from source, or simply using `pip`.
24 | The only required dependency is, of course, *pydantic*, while the remaining ones can be selected depending on your needs:
25 | ```console
26 | recommended choice: install everything
27 | this includes orjson, pyyaml, tomli, python-dotenv
28 | user@pc:~$ pip install argdantic[all]
29 | 
30 | env, json, toml or yaml dependencies
31 | user@pc:~$ pip install argdantic[env|json|toml|yaml]
32 | 
33 | minimum requirement, only pydantic included
34 | user@pc:~$ pip install argdantic
35 | ```
36 | 
37 | ### A Simple Example
38 | 
39 | Creating a CLI with `argdantic` can be as simple as:
40 | ```python
41 | from argdantic import ArgParser
42 | 
43 | # 1. create a CLI instance
44 | parser = ArgParser()
45 | 
46 | 
47 | # 2. decorate the function to be called
48 | @parser.command()
49 | def buy(name: str, quantity: int, price: float):
50 |     print(f"Bought {quantity} {name} at ${price:.2f}.")
51 | 
52 | # 3. Use your CLI by simply calling it
53 | if __name__ == "__main__":
54 |     parser()
55 | ```
56 | Then, in a terminal, the `--help` flag can provide the usual information:
57 | 
58 | ```console
59 | $ python cli.py --help
60 | > usage: buy [-h] --name TEXT --quantity INT --price FLOAT
61 | >
62 | > optional arguments:
63 | > -h, --help show this help message and exit
64 | > --name TEXT
65 | > --quantity INT
66 | > --price FLOAT
67 | ```
68 | This gives us the required arguments for the execution:
69 | ```console
70 | $ python cli.py --name apples --quantity 10 --price 3.4
71 | > Bought 10 apples at $3.40.
72 | ```
73 | 
74 | ## Acknowledgements
75 | 
76 | This project is heavily inspired by other awesome works, including:
77 | 
78 | - [click](https://github.com/pallets/click): the most popular Python CLI library for complex applications, and the best alternative to `argparse`.
79 | 
80 | - [typer](https://github.com/tiangolo/typer): based on `click`, a great project that inspired the creation of `argdantic`. It is a great alternative; however, it does not support `pydantic` models at the moment.
81 | 
82 | - [pydantic-cli](https://github.com/mpkocher/pydantic-cli): a mature project that provides a similar experience to `argdantic`; however, it does not support nested models, commands, or different sources.
83 | 
84 | Do you like `argdantic`, but prefer `click` as a CLI library? Check out [clidantic](https://github.com/edornd/clidantic), a twin project that uses `click` instead of `argparse`.
85 | 
86 | ## License
87 | 
88 | This project is licensed under the terms of the MIT license.
89 | 
90 | ## Contributing
91 | 
92 | Contributions are welcome, and they are greatly appreciated! Every little bit helps, and credit will always be given.
93 | -------------------------------------------------------------------------------- /examples/basic/empty.py: -------------------------------------------------------------------------------- 1 | from argdantic import ArgParser 2 | 3 | cli = ArgParser() 4 | 5 | 6 | if __name__ == "__main__": 7 | cli() 8 | -------------------------------------------------------------------------------- /examples/basic/empty_cmd.py: -------------------------------------------------------------------------------- 1 | from argdantic import ArgParser 2 | 3 | cli = ArgParser() 4 | 5 | 6 | @cli.command() 7 | def hello_world(): 8 | print("Hello World!") 9 | 10 | 11 | if __name__ == "__main__": 12 | cli() 13 | -------------------------------------------------------------------------------- /examples/basic/simple.py: -------------------------------------------------------------------------------- 1 | from argdantic import ArgParser 2 | 3 | cli = ArgParser() 4 | 5 | 6 | @cli.command() 7 | def hello(name: str): 8 | print(f"Hello, {name}!") 9 | 10 | 11 | if __name__ == "__main__": 12 | cli() 13 | -------------------------------------------------------------------------------- /examples/composition/nested_models.py: -------------------------------------------------------------------------------- 1 | from typing import Set 2 | 3 | from pydantic import BaseModel 4 | 5 | from argdantic import ArgParser 6 | 7 | 8 | class Image(BaseModel): 9 | url: str 10 | name: str 11 | 12 | 13 | class Item(BaseModel): 14 | name: str 15 | description: str = None 16 | price: float 17 | tags: Set[str] = set() 18 | image: Image = None 19 | 20 | 21 | cli = ArgParser() 22 | 23 | 24 | @cli.command() 25 | def create_item(item: Item): 26 | print(item) 27 | 28 | 29 | if __name__ == "__main__": 30 | cli() 31 | -------------------------------------------------------------------------------- /examples/composition/nested_parsers.py: -------------------------------------------------------------------------------- 1 | from argdantic import ArgParser 2 | 3 | users = ArgParser(name="users") 4 | books = ArgParser(name="books") 5 | 6 | 7 | @users.command() 8 | def add_user(name: str, age: int): 9 | """Adds a single user.""" 10 | print(f"Adding user: {name} ({age})") 11 | 12 | 13 | @users.command() 14 | def delete_user(name: str): 15 | """Deletes a user by name.""" 16 | print(f"Deleting user: {name}") 17 | 18 | 19 | @books.command() 20 | def add_book(name: str, author: str): 21 | """Adds a book, with name and author.""" 22 | print(f"Adding book: {name} ({author})") 23 | 24 | 25 | @books.command() 26 | def delete_book(name: str): 27 | """Deletes a book by name.""" 28 | print(f"Deleting book: {name}") 29 | 30 | 31 | cli = ArgParser() 32 | cli.add_parser(users) 33 | cli.add_parser(books) 34 | 35 | if __name__ == "__main__": 36 | cli() 37 | -------------------------------------------------------------------------------- /examples/composition/singleton.py: -------------------------------------------------------------------------------- 1 | from datetime import date 2 | from typing import List 3 | 4 | from pydantic import BaseModel 5 | 6 | from argdantic import ArgParser 7 | 8 | 9 | class Image(BaseModel): 10 | name: str 11 | 12 | 13 | class Item(BaseModel): 14 | name: str 15 | description: str 16 | image: Image | None = None 17 | dates: List[date] 18 | 19 | 20 | cli = ArgParser() 21 | 22 | 23 | @cli.command(singleton=True) 24 | def create_item(item: Item): 25 | print(item) 26 | 27 | 28 | if __name__ == "__main__": 29 | cli() 30 | 
-------------------------------------------------------------------------------- /examples/customization/aliases.py: -------------------------------------------------------------------------------- 1 | from argdantic import ArgField, ArgParser 2 | 3 | cli = ArgParser() 4 | 5 | 6 | @cli.command() 7 | def hello(name: str = ArgField("-n"), age: int = ArgField("-a")): 8 | """Print a greeting message.""" 9 | print(f"Hello, {name}!") 10 | print(f"You are {age} years old.") 11 | 12 | 13 | if __name__ == "__main__": 14 | cli() 15 | -------------------------------------------------------------------------------- /examples/customization/custom_name.py: -------------------------------------------------------------------------------- 1 | from argdantic import ArgParser 2 | 3 | cli = ArgParser() 4 | 5 | 6 | @cli.command(name="greetings") 7 | def hello(name: str): 8 | print(f"Hello, {name}!") 9 | 10 | 11 | if __name__ == "__main__": 12 | cli() 13 | -------------------------------------------------------------------------------- /examples/customization/custom_name_grouped.py: -------------------------------------------------------------------------------- 1 | from argdantic import ArgParser 2 | 3 | cli = ArgParser(force_group=True) 4 | 5 | 6 | @cli.command(name="greetings") 7 | def hello(name: str): 8 | """ 9 | Say hello. 10 | """ 11 | print(f"Hello, {name}!") 12 | 13 | 14 | if __name__ == "__main__": 15 | cli() 16 | -------------------------------------------------------------------------------- /examples/customization/custom_name_multi.py: -------------------------------------------------------------------------------- 1 | from argdantic import ArgParser 2 | 3 | cli = ArgParser() 4 | 5 | 6 | @cli.command(name="hi") 7 | def hello(name: str): 8 | """ 9 | Say hello. 10 | """ 11 | print(f"Hello, {name}!") 12 | 13 | 14 | @cli.command(name="bye") 15 | def goodbye(name: str): 16 | """ 17 | Say goodbye. 
18 | """ 19 | print(f"Goodbye, {name}!") 20 | 21 | 22 | if __name__ == "__main__": 23 | cli() 24 | -------------------------------------------------------------------------------- /examples/customization/default_values.py: -------------------------------------------------------------------------------- 1 | from argdantic import ArgParser 2 | 3 | cli = ArgParser() 4 | 5 | 6 | @cli.command() 7 | def hello(name: str = "World", age: int = 42): 8 | """Print a greeting message.""" 9 | print(f"Hello, {name}!") 10 | print(f"You are {age} years old.") 11 | 12 | 13 | if __name__ == "__main__": 14 | cli() 15 | -------------------------------------------------------------------------------- /examples/customization/default_values_none.py: -------------------------------------------------------------------------------- 1 | from argdantic import ArgParser 2 | 3 | cli = ArgParser() 4 | 5 | 6 | @cli.command() 7 | def hello(name: str = None, age: int = None): 8 | """Print a greeting message.""" 9 | print(f"Hello, {name}!") 10 | print(f"You are {age} years old.") 11 | 12 | 13 | if __name__ == "__main__": 14 | cli() 15 | -------------------------------------------------------------------------------- /examples/customization/default_with_fields.py: -------------------------------------------------------------------------------- 1 | from argdantic import ArgField, ArgParser 2 | 3 | cli = ArgParser() 4 | 5 | 6 | @cli.command() 7 | def hello( 8 | name: str = ArgField("-n", default="John"), 9 | age: int = ArgField("-a", default=30), 10 | ): 11 | """Print a greeting message.""" 12 | print(f"Hello, {name}!") 13 | print(f"You are {age} years old.") 14 | 15 | 16 | if __name__ == "__main__": 17 | cli() 18 | -------------------------------------------------------------------------------- /examples/customization/description.py: -------------------------------------------------------------------------------- 1 | from argdantic import ArgParser 2 | 3 | cli = ArgParser(force_group=True) 4 | 5 | 6 | @cli.command(help="Print a greeting message") 7 | def hello(name: str): 8 | print(f"Hello, {name}!") 9 | 10 | 11 | if __name__ == "__main__": 12 | cli() 13 | -------------------------------------------------------------------------------- /examples/customization/description_docs.py: -------------------------------------------------------------------------------- 1 | from argdantic import ArgParser 2 | 3 | cli = ArgParser(force_group=True) 4 | 5 | 6 | @cli.command() 7 | def hello(name: str): 8 | """Print a greeting message.""" 9 | print(f"Hello, {name}!") 10 | 11 | 12 | if __name__ == "__main__": 13 | cli() 14 | -------------------------------------------------------------------------------- /examples/customization/description_fields.py: -------------------------------------------------------------------------------- 1 | from argdantic import ArgField, ArgParser 2 | 3 | cli = ArgParser() 4 | 5 | 6 | @cli.command() 7 | def hello( 8 | name: str = ArgField("-n", default="John", description="your name"), 9 | age: int = ArgField("-a", default=30, description="your age"), 10 | ): 11 | """Print a greeting message.""" 12 | print(f"Hello, {name}!") 13 | print(f"You are {age} years old.") 14 | 15 | 16 | if __name__ == "__main__": 17 | cli() 18 | -------------------------------------------------------------------------------- /examples/sources/dynamic.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | from argdantic import ArgParser 4 | from argdantic.sources import 
YamlFileLoader, from_file 5 | 6 | 7 | @from_file(loader=YamlFileLoader) 8 | class Optimizer(BaseModel): 9 | name: str = "SGD" 10 | learning_rate: float = 0.01 11 | momentum: float = 0.9 12 | 13 | 14 | @from_file(loader=YamlFileLoader) 15 | class Dataset(BaseModel): 16 | name: str = "CIFAR10" 17 | batch_size: int = 32 18 | tile_size: int = 256 19 | shuffle: bool = True 20 | 21 | 22 | cli = ArgParser() 23 | 24 | 25 | @cli.command() 26 | def create_item(dataset: Dataset, optim: Optimizer): 27 | print(dataset) 28 | print(optim) 29 | 30 | 31 | if __name__ == "__main__": 32 | cli() 33 | -------------------------------------------------------------------------------- /examples/sources/dynamic_custom.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from pydantic import BaseModel 4 | 5 | from argdantic import ArgParser 6 | from argdantic.sources import YamlFileLoader, from_file 7 | 8 | 9 | @from_file(loader=YamlFileLoader, use_field="path") 10 | class Optimizer(BaseModel): 11 | path: Path 12 | name: str = "SGD" 13 | learning_rate: float = 0.01 14 | momentum: float = 0.9 15 | 16 | 17 | @from_file(loader=YamlFileLoader, required=False) 18 | class Dataset(BaseModel): 19 | name: str = "CIFAR10" 20 | batch_size: int = 32 21 | tile_size: int = 256 22 | shuffle: bool = True 23 | 24 | 25 | cli = ArgParser() 26 | 27 | 28 | @cli.command() 29 | def create_item(optim: Optimizer, dataset: Dataset = Dataset()): 30 | print(dataset) 31 | print(optim) 32 | 33 | 34 | if __name__ == "__main__": 35 | cli() 36 | -------------------------------------------------------------------------------- /examples/sources/resources/dataset.yml: -------------------------------------------------------------------------------- 1 | name: coco 2 | batch_size: 32 3 | tile_size: 512 4 | shuffle: true 5 | -------------------------------------------------------------------------------- /examples/sources/resources/optim.yml: -------------------------------------------------------------------------------- 1 | name: "adam" 2 | learning_rate: 0.001 3 | momentum: 0.9 4 | -------------------------------------------------------------------------------- /examples/sources/settings.yaml: -------------------------------------------------------------------------------- 1 | item: 2 | name: example 3 | description: Example item 4 | price: 2.3 5 | tags: 6 | - example 7 | - item 8 | - tag 9 | image: 10 | url: https://example.com/image.jpg 11 | name: example.jpg 12 | -------------------------------------------------------------------------------- /examples/sources/single_cmd.py: -------------------------------------------------------------------------------- 1 | from typing import Set 2 | 3 | from pydantic import BaseModel 4 | 5 | from argdantic import ArgParser 6 | from argdantic.sources import ( 7 | EnvSettingsSource, 8 | JsonSettingsSource, 9 | TomlSettingsSource, 10 | YamlSettingsSource, 11 | ) 12 | 13 | 14 | class Image(BaseModel): 15 | url: str = None 16 | name: str = None 17 | 18 | 19 | class Item(BaseModel): 20 | name: str = "test" 21 | description: str = None 22 | price: float = 10.0 23 | tags: Set[str] = set() 24 | image: Image = None 25 | 26 | 27 | cli = ArgParser() 28 | 29 | 30 | @cli.command( 31 | sources=[ 32 | EnvSettingsSource(env_file=".env"), 33 | JsonSettingsSource(path="settings.json"), 34 | YamlSettingsSource(path="settings.yaml"), 35 | TomlSettingsSource(path="settings.toml"), 36 | ] 37 | ) 38 | def create_item(item: Item): 39 | print(item) 40 | 41 | 42 | if 
__name__ == "__main__": 43 | cli() 44 | -------------------------------------------------------------------------------- /examples/typing/choices.py: -------------------------------------------------------------------------------- 1 | from enum import Enum, IntEnum 2 | from typing import Literal 3 | 4 | from argdantic import ArgParser 5 | 6 | cli = ArgParser() 7 | 8 | 9 | class ToolEnum(Enum): 10 | hammer = "Hammer" 11 | screwdriver = "Screwdriver" 12 | 13 | 14 | class HTTPEnum(IntEnum): 15 | ok = 200 16 | not_found = 404 17 | internal_error = 500 18 | 19 | 20 | @cli.command() 21 | def run( 22 | a: Literal["one", "two"] = "two", 23 | b: Literal[1, 2] = 2, 24 | c: Literal[True, False] = True, 25 | d: ToolEnum = ToolEnum.hammer, 26 | e: HTTPEnum = HTTPEnum.not_found, 27 | ): 28 | print(f"a: {a}") 29 | print(f"b: {b}") 30 | print(f"c: {c}") 31 | print(f"d: {d}") 32 | print(f"e: {e}") 33 | 34 | 35 | if __name__ == "__main__": 36 | cli() 37 | -------------------------------------------------------------------------------- /examples/typing/containers.py: -------------------------------------------------------------------------------- 1 | from typing import Deque, Dict, FrozenSet, List, Optional, Sequence, Set, Tuple 2 | 3 | from argdantic import ArgParser 4 | 5 | cli = ArgParser() 6 | 7 | 8 | @cli.command() 9 | def run( 10 | simple_list: list, 11 | list_of_ints: List[int], 12 | simple_tuple: tuple, 13 | multi_typed_tuple: Tuple[int, float, str, bool], 14 | simple_dict: dict, 15 | dict_str_float: Dict[str, float], 16 | simple_set: set, 17 | set_bytes: Set[bytes], 18 | frozen_set: FrozenSet[int], 19 | none_or_str: Optional[str], 20 | sequence_of_ints: Sequence[int], 21 | compound: Dict[str, List[Set[int]]], 22 | deque: Deque[int], 23 | ): 24 | print(f"simple_list: {simple_list}") 25 | print(f"list_of_ints: {list_of_ints}") 26 | print(f"simple_tuple: {simple_tuple}") 27 | print(f"multi_typed_tuple: {multi_typed_tuple}") 28 | print(f"simple_dict: {simple_dict}") 29 | print(f"dict_str_float: {dict_str_float}") 30 | print(f"simple_set: {simple_set}") 31 | print(f"set_bytes: {set_bytes}") 32 | print(f"frozen_set: {frozen_set}") 33 | print(f"none_or_str: {none_or_str}") 34 | print(f"sequence_of_ints: {sequence_of_ints}") 35 | print(f"compound: {compound}") 36 | print(f"deque: {deque}") 37 | 38 | 39 | if __name__ == "__main__": 40 | cli() 41 | -------------------------------------------------------------------------------- /examples/typing/primitive.py: -------------------------------------------------------------------------------- 1 | from argdantic import ArgParser 2 | 3 | cli = ArgParser() 4 | 5 | 6 | @cli.command() 7 | def status(name: str, age: int, weight: float, data: bytes, flag: bool): 8 | print(f"name: {name}") 9 | print(f"age: {age}") 10 | print(f"weight: {weight}") 11 | print(f"data: {data}") 12 | print(f"flag: {flag}") 13 | 14 | 15 | if __name__ == "__main__": 16 | cli() 17 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Argdantic 2 | repo_url: https://github.com/edornd/argdantic 3 | repo_name: edornd/argdantic 4 | theme: 5 | name: material 6 | icon: 7 | admonition: 8 | note: octicons/tag-16 9 | abstract: octicons/checklist-16 10 | info: octicons/info-16 11 | tip: octicons/squirrel-16 12 | success: octicons/check-16 13 | question: octicons/question-16 14 | warning: octicons/alert-16 15 | failure: octicons/x-circle-16 16 | danger: octicons/zap-16 17 
| bug: octicons/bug-16 18 | example: octicons/beaker-16 19 | quote: octicons/quote-16 20 | palette: 21 | - scheme: default 22 | primary: deep orange 23 | accent: red 24 | toggle: 25 | icon: material/weather-night 26 | name: Switch to dark mode 27 | - scheme: slate 28 | primary: deep orange 29 | accent: red 30 | toggle: 31 | icon: material/weather-sunny 32 | name: Switch to light mode 33 | 34 | nav: 35 | - Home: index.md 36 | - User Guide: 37 | - guide/intro.md 38 | - guide/customization.md 39 | - guide/types.md 40 | - guide/composition.md 41 | - guide/sources.md 42 | 43 | markdown_extensions: 44 | - admonition 45 | - pymdownx.highlight: 46 | anchor_linenums: true 47 | - pymdownx.inlinehilite 48 | - pymdownx.snippets 49 | - pymdownx.superfences 50 | - mdx_include 51 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | build-backend = "flit_core.buildapi" 3 | requires = ["flit_core >=3.2,<4"] 4 | 5 | [project] 6 | authors = [{ name = "Edoardo Arnaudo", email = "edoardo.arn@gmail.com" }] 7 | classifiers = [ 8 | "Development Status :: 4 - Beta", 9 | "Topic :: Software Development", 10 | "Operating System :: OS Independent", 11 | "Programming Language :: Python :: 3.8", 12 | "Programming Language :: Python :: 3.9", 13 | "Programming Language :: Python :: 3.10", 14 | "Programming Language :: Python :: 3.11", 15 | "Programming Language :: Python :: 3.12", 16 | "Typing :: Typed", 17 | "Intended Audience :: Information Technology", 18 | "Intended Audience :: Science/Research", 19 | "Intended Audience :: Developers", 20 | "License :: OSI Approved :: MIT License", 21 | ] 22 | dependencies = [ 23 | "pydantic >= 2.8.0, < 3.0", 24 | "pydantic-settings >= 2.4.0, < 3" 25 | ] 26 | dynamic = ["version", "description"] 27 | license = { file = "LICENSE" } 28 | name = "argdantic" 29 | readme = "README.md" 30 | 31 | [project.urls] 32 | Home = "https://github.com/edornd" 33 | 34 | [project.optional-dependencies] 35 | all = [ 36 | "python-dotenv >= 1.0.0, < 2.0", 37 | "orjson >= 3.10.0, < 4.0", 38 | "toml >= 0.10.0, < 1.0", 39 | "tomli >= 2.0, < 3.0", 40 | "pyyaml >= 6.0.0, < 7.0", 41 | "tomli-w >= 1.0.0, < 2.0", 42 | ] 43 | dev = [ 44 | "flit >= 3.9.0, < 4.0", 45 | "ruff >= 0.5.6, < 1.0", 46 | ] 47 | docs = [ 48 | "mkdocs >= 1.6.0, < 2.0", 49 | "mkdocs-material >= 9.5.0, < 10.0", 50 | "mdx-include >= 1.4.0, < 2.0", 51 | ] 52 | env = [ 53 | "python-dotenv >= 1.0.0, < 2.0" 54 | ] 55 | json = [ 56 | "orjson >= 3.10.0, < 4.0" 57 | ] 58 | test = [ 59 | "coverage >= 7.6.0, < 8.0", 60 | "mock >= 5.1.0, < 6.0", 61 | "pytest >= 8.3.0, < 9.0", 62 | "pytest-cov >= 5.0.0, < 6.0", 63 | "pytest-xdist >= 3.6.0, < 4.0", 64 | ] 65 | toml = [ 66 | "toml >= 0.10.0, < 1.0", 67 | "tomli >= 2.0, < 3.0", 68 | "tomli-w >= 1.0.0, < 2.0", 69 | ] 70 | yaml = [ 71 | "pyyaml >= 6.0.0, < 7.0" 72 | ] 73 | 74 | [tool.ruff] 75 | line-length = 120 76 | 77 | [tool.mypy] 78 | modules = "argdantic" 79 | exclude = ["tests"] 80 | 81 | [tool.pytest.ini_options] 82 | addopts = "-ra --capture=sys" 83 | log_cli = true 84 | log_cli_level = "info" 85 | minversion = "6.0" 86 | testpaths = ["tests"] 87 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/edornd/argdantic/9e9d7340d4789ba2617baf323504b8fd14396a99/tests/__init__.py 
-------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from argdantic.testing import CLIRunner 4 | 5 | 6 | @pytest.fixture(scope="function") 7 | def runner(): 8 | return CLIRunner() 9 | -------------------------------------------------------------------------------- /tests/test_basic.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | 4 | import pytest 5 | from pydantic import BaseModel, Field 6 | from pytest import CaptureFixture 7 | 8 | from argdantic import ArgField, ArgParser 9 | from argdantic.testing import CLIRunner 10 | 11 | LOG = logging.getLogger(__name__) 12 | 13 | 14 | def test_empty(runner: CLIRunner, capsys: CaptureFixture): 15 | parser = ArgParser() 16 | 17 | @parser.command() 18 | def empty(): 19 | print("Hello world") 20 | 21 | runner.invoke(parser, []) 22 | output = capsys.readouterr() 23 | LOG.debug(output) 24 | assert output.err.rstrip() == "" 25 | assert output.out.rstrip() == "Hello world" 26 | 27 | 28 | def test_repr(): 29 | cli = ArgParser() 30 | 31 | @cli.command() 32 | def command1(): 33 | pass 34 | 35 | @cli.command() 36 | def command2(): 37 | pass 38 | 39 | LOG.debug(str(command1)) 40 | LOG.debug(str(command2)) 41 | LOG.debug(str(cli)) 42 | assert cli.entrypoint is None 43 | assert isinstance(cli.commands, list) 44 | assert len(cli.commands) == 2 45 | assert repr(command1) == "" 46 | assert repr(command2) == "" 47 | assert repr(cli) == ", ])>" 48 | 49 | 50 | def test_repr_named(): 51 | cli = ArgParser(name="cli") 52 | 53 | @cli.command(name="cmd1") 54 | def command1(): 55 | pass 56 | 57 | @cli.command(name="cmd2") 58 | def command2(): 59 | pass 60 | 61 | LOG.debug(str(command1)) 62 | LOG.debug(str(command2)) 63 | LOG.debug(str(cli)) 64 | assert cli.entrypoint is None 65 | assert isinstance(cli.commands, list) 66 | assert len(cli.commands) == 2 67 | assert repr(command1) == "" 68 | assert repr(command2) == "" 69 | assert repr(cli) == ", ])>" 70 | 71 | 72 | def test_empty_help(runner: CLIRunner, capsys: CaptureFixture): 73 | parser = ArgParser() 74 | 75 | @parser.command() 76 | def empty(): 77 | print("Hello world") 78 | 79 | runner.invoke(parser, ["--help"]) 80 | output = capsys.readouterr() 81 | LOG.debug(output) 82 | assert output.err.rstrip() == "" 83 | assert "usage" in output.out.rstrip() 84 | assert "-h, --help show this help message and exit" in output.out.rstrip() 85 | 86 | 87 | def test_empty_help_named(runner: CLIRunner, capsys: CaptureFixture): 88 | parser = ArgParser(name="mark") 89 | 90 | @parser.command() 91 | def empty(): 92 | print("Hello world") 93 | 94 | runner.invoke(parser, ["--help"]) 95 | output = capsys.readouterr() 96 | LOG.debug(output) 97 | assert output.err.rstrip() == "" 98 | assert "usage: mark" in output.out.rstrip() 99 | assert "-h, --help" in output.out.rstrip() 100 | assert "show this help message and exit" in output.out.rstrip() 101 | 102 | 103 | def test_missing_annotation(runner: CLIRunner, capsys: CaptureFixture): 104 | parser = ArgParser() 105 | 106 | with pytest.raises(AssertionError): 107 | 108 | @parser.command() 109 | def empty(a): 110 | print(f"Hello {a}") 111 | 112 | 113 | def test_build_entrypoint(runner: CLIRunner, capsys: CaptureFixture): 114 | parser = ArgParser() 115 | 116 | @parser.command() 117 | def empty(): 118 | print("Hello world") 119 | 120 | try: 121 | parser() 122 | except 
SystemExit: 123 | pass 124 | assert parser.entrypoint is not None 125 | assert isinstance(parser.entrypoint, argparse.ArgumentParser) 126 | 127 | 128 | def test_multiple_commands(runner: CLIRunner, capsys: CaptureFixture): 129 | parser = ArgParser() 130 | 131 | @parser.command() 132 | def empty(): 133 | print("Hello world") 134 | 135 | @parser.command() 136 | def empty2(): 137 | print("Hello world 2") 138 | 139 | runner.invoke(parser, ["empty"]) 140 | assert parser.entrypoint is not None 141 | assert isinstance(parser.entrypoint, argparse.ArgumentParser) 142 | assert len(parser.commands) == 2 143 | assert parser.commands[0].name == "empty" 144 | assert parser.commands[1].name == "empty2" 145 | output = capsys.readouterr() 146 | LOG.debug(output) 147 | assert output.err.rstrip() == "" 148 | assert output.out.rstrip() == "Hello world" 149 | runner.invoke(parser, ["empty2"]) 150 | output = capsys.readouterr() 151 | LOG.debug(output) 152 | assert output.err.rstrip() == "" 153 | assert output.out.rstrip() == "Hello world 2" 154 | 155 | 156 | def test_add_parser_no_name(): 157 | parser1 = ArgParser() 158 | parser2 = ArgParser() 159 | 160 | @parser1.command() 161 | def empty1(): 162 | print("Hello world 1") 163 | 164 | @parser2.command() 165 | def empty2(): 166 | print("Hello world 2") 167 | 168 | with pytest.raises(AssertionError): 169 | parser1.add_parser(parser2) 170 | 171 | 172 | def test_add_parser_named(runner: CLIRunner, capsys: CaptureFixture): 173 | parser1 = ArgParser(name="parser1") 174 | parser2 = ArgParser(name="parser2") 175 | 176 | @parser1.command() 177 | def empty1(): 178 | print("Hello world 1") 179 | 180 | @parser2.command() 181 | def empty2(): 182 | print("Hello world 2") 183 | 184 | parser1.add_parser(parser2) 185 | assert len(parser1.commands) == 1 186 | assert len(parser1.groups) == 1 187 | assert len(parser2.commands) == 1 188 | assert len(parser2.groups) == 0 189 | runner.invoke(parser1, []) 190 | output = capsys.readouterr() 191 | LOG.debug(output) 192 | assert "usage: parser1" in output.err.rstrip() 193 | runner.invoke(parser1, ["empty1"]) 194 | output = capsys.readouterr() 195 | assert output.err.rstrip() == "" 196 | assert output.out.rstrip() == "Hello world 1" 197 | runner.invoke(parser1, ["parser2"]) 198 | output = capsys.readouterr() 199 | assert output.err.rstrip() == "" 200 | assert output.out.rstrip() == "Hello world 2" 201 | 202 | 203 | def test_add_named_parser_force_group(runner: CLIRunner, capsys: CaptureFixture): 204 | parser1 = ArgParser(name="parser1", force_group=True) 205 | parser2 = ArgParser(name="parser2", force_group=True) 206 | 207 | @parser1.command() 208 | def empty1(): 209 | print("Hello world 1") 210 | 211 | @parser2.command() 212 | def empty2(): 213 | print("Hello world 2") 214 | 215 | parser1.add_parser(parser2) 216 | assert len(parser1.commands) == 1 217 | assert len(parser1.groups) == 1 218 | assert len(parser2.commands) == 1 219 | assert len(parser2.groups) == 0 220 | runner.invoke(parser1, []) 221 | output = capsys.readouterr() 222 | LOG.debug(output) 223 | assert "usage: parser1" in output.err.rstrip() 224 | runner.invoke(parser1, ["empty1"]) 225 | output = capsys.readouterr() 226 | assert output.err.rstrip() == "" 227 | assert output.out.rstrip() == "Hello world 1" 228 | runner.invoke(parser1, ["parser2"]) 229 | output = capsys.readouterr() 230 | assert "usage:" in output.err.rstrip() 231 | runner.invoke(parser1, ["parser2", "empty2"]) 232 | output = capsys.readouterr() 233 | assert output.err.rstrip() == "" 234 | assert 
output.out.rstrip() == "Hello world 2" 235 | 236 | 237 | def test_add_parser_pass_name(runner: CLIRunner, capsys: CaptureFixture): 238 | parser1 = ArgParser() 239 | parser2 = ArgParser() 240 | 241 | @parser1.command() 242 | def empty1(): 243 | print("Hello world 1") 244 | 245 | @parser2.command() 246 | def empty2(): 247 | print("Hello world 2") 248 | 249 | parser1.add_parser(parser2, name="parser2") 250 | assert len(parser1.commands) == 1 251 | assert len(parser1.groups) == 1 252 | assert len(parser2.commands) == 1 253 | assert len(parser2.groups) == 0 254 | runner.invoke(parser1, []) 255 | output = capsys.readouterr() 256 | LOG.debug(output) 257 | assert "usage:" in output.err.rstrip() 258 | runner.invoke(parser1, ["empty1"]) 259 | output = capsys.readouterr() 260 | assert output.err.rstrip() == "" 261 | assert output.out.rstrip() == "Hello world 1" 262 | runner.invoke(parser1, ["parser2"]) 263 | output = capsys.readouterr() 264 | assert output.err.rstrip() == "" 265 | assert output.out.rstrip() == "Hello world 2" 266 | 267 | 268 | def test_pydantic_model(runner: CLIRunner, capsys: CaptureFixture): 269 | parser = ArgParser() 270 | 271 | class Config(BaseModel): 272 | a: str 273 | b: int 274 | 275 | @parser.command() 276 | def empty(cfg: Config): 277 | print(f"{cfg.a} {cfg.b}") 278 | 279 | runner.invoke(parser, ["--cfg.a=hello", "--cfg.b=42"]) 280 | output = capsys.readouterr() 281 | LOG.debug(output) 282 | assert output.err.rstrip() == "" 283 | assert output.out.rstrip() == "hello 42" 284 | 285 | 286 | def test_field_description(runner: CLIRunner, capsys: CaptureFixture): 287 | parser = ArgParser() 288 | 289 | class Config(BaseModel): 290 | a: str = Field(description="A string") 291 | b: int = Field(description="An integer") 292 | 293 | @parser.command() 294 | def empty(cfg: Config): 295 | print(f"{cfg.a} {cfg.b}") 296 | 297 | runner.invoke(parser, ["--help"]) 298 | output = capsys.readouterr() 299 | LOG.debug(output) 300 | assert output.err.rstrip() == "" 301 | assert "A string" in output.out.rstrip() 302 | assert "An integer" in output.out.rstrip() 303 | 304 | 305 | def test_field_default(runner: CLIRunner, capsys: CaptureFixture): 306 | parser = ArgParser() 307 | 308 | class Config(BaseModel): 309 | a: str = Field("hello") 310 | b: int = Field(42) 311 | 312 | @parser.command() 313 | def empty(cfg: Config = Config()): 314 | print(f"{cfg.a} {cfg.b}") 315 | 316 | runner.invoke(parser, []) 317 | output = capsys.readouterr() 318 | LOG.debug(output) 319 | assert output.err.rstrip() == "" 320 | assert output.out.rstrip() == "hello 42" 321 | 322 | 323 | def test_field_defaults_new_values(runner: CLIRunner, capsys: CaptureFixture): 324 | parser = ArgParser() 325 | 326 | class Config(BaseModel): 327 | a: str = Field("hello") 328 | b: int = Field(42) 329 | 330 | @parser.command() 331 | def empty(cfg: Config): 332 | print(f"{cfg.a} {cfg.b}") 333 | 334 | runner.invoke(parser, ["--cfg.a=world", "--cfg.b=43"]) 335 | output = capsys.readouterr() 336 | LOG.debug(output) 337 | assert output.err.rstrip() == "" 338 | assert output.out.rstrip() == "world 43" 339 | 340 | 341 | def test_arg_field(runner: CLIRunner, capsys: CaptureFixture): 342 | parser = ArgParser() 343 | 344 | class Config(BaseModel): 345 | a: str = ArgField(default="hello", description="A string") 346 | b: int = ArgField(default=42, description="An integer") 347 | 348 | @parser.command() 349 | def empty(cfg: Config): 350 | print(f"{cfg.a} {cfg.b}") 351 | 352 | runner.invoke(parser, ["--help"]) 353 | output = capsys.readouterr() 354 | 
LOG.debug(output) 355 | assert output.err.rstrip() == "" 356 | assert "A string" in output.out.rstrip() 357 | assert "An integer" in output.out.rstrip() 358 | runner.invoke(parser, ["--cfg.a=world", "--cfg.b=43"]) 359 | output = capsys.readouterr() 360 | LOG.debug(output) 361 | assert output.err.rstrip() == "" 362 | assert output.out.rstrip() == "world 43" 363 | 364 | 365 | def test_arg_field_in_function(runner: CLIRunner, capsys: CaptureFixture): 366 | parser = ArgParser() 367 | 368 | @parser.command() 369 | def empty( 370 | a: str = ArgField(default="hello", description="A string"), 371 | b: int = ArgField(default=42, description="An integer"), 372 | ): 373 | print(f"{a} {b}") 374 | 375 | runner.invoke(parser, ["--help"]) 376 | output = capsys.readouterr() 377 | LOG.debug(output) 378 | assert output.err.rstrip() == "" 379 | assert "A string" in output.out.rstrip() 380 | assert "An integer" in output.out.rstrip() 381 | runner.invoke(parser, ["--a=world", "--b=43"]) 382 | output = capsys.readouterr() 383 | LOG.debug(output) 384 | assert output.err.rstrip() == "" 385 | assert output.out.rstrip() == "world 43" 386 | 387 | 388 | def test_arg_field_names(runner: CLIRunner, capsys: CaptureFixture): 389 | parser = ArgParser() 390 | 391 | class Config(BaseModel): 392 | a: str = ArgField("--foo", "-f", default="hello", description="A string") 393 | b: int = ArgField("--bar", "-b", default=42, description="An integer") 394 | 395 | @parser.command() 396 | def empty(cfg: Config): 397 | print(f"{cfg.a} {cfg.b}") 398 | 399 | runner.invoke(parser, ["--help"]) 400 | output = capsys.readouterr() 401 | stripped = output.out.rstrip() 402 | LOG.debug(stripped) 403 | assert output.err.rstrip() == "" 404 | assert "--cfg.a" in stripped 405 | assert "--cfg.b" in stripped 406 | assert "--foo" in stripped 407 | assert "--bar" in stripped 408 | assert "-f" in stripped 409 | assert "-b" in stripped 410 | assert "A string" in stripped 411 | assert "An integer" in stripped 412 | 413 | runner.invoke(parser, ["--cfg.a=world", "--cfg.b=43"]) 414 | output = capsys.readouterr() 415 | stripped = output.out.rstrip() 416 | LOG.debug(stripped) 417 | assert output.err.rstrip() == "" 418 | assert stripped == "world 43" 419 | 420 | runner.invoke(parser, ["--foo=world", "--bar=43"]) 421 | output = capsys.readouterr() 422 | stripped = output.out.rstrip() 423 | LOG.debug(stripped) 424 | assert output.err.rstrip() == "" 425 | assert stripped == "world 43" 426 | 427 | runner.invoke(parser, ["-f=world", "-b=43"]) 428 | output = capsys.readouterr() 429 | stripped = output.out.rstrip() 430 | LOG.debug(stripped) 431 | assert output.err.rstrip() == "" 432 | assert stripped == "world 43" 433 | 434 | 435 | def test_validation_error(runner: CLIRunner, capsys: CaptureFixture): 436 | parser = ArgParser() 437 | 438 | class Config(BaseModel): 439 | a: str = ArgField("--foo", "-f", description="A string", min_length=10) 440 | b: int = ArgField("--bar", "-b", description="An integer", gt=43) 441 | 442 | @parser.command() 443 | def empty(cfg: Config): 444 | print(f"{cfg.a} {cfg.b}") 445 | 446 | runner.invoke(parser, ["--cfg.a=short", "--cfg.b=42"]) 447 | output = capsys.readouterr() 448 | LOG.debug(output) 449 | assert "cfg -> a: String should have at least 10 characters" in output.err.rstrip() 450 | assert "cfg -> b: Input should be greater than 43" in output.err.rstrip() 451 | assert output.out.rstrip() == "" 452 | 453 | runner.invoke(parser, ["--foo=verylongname", "--bar=44"]) 454 | output = capsys.readouterr() 455 | stripped = 
output.out.rstrip() 456 | LOG.debug(stripped) 457 | assert output.err.rstrip() == "" 458 | assert stripped == "verylongname 44" 459 | 460 | 461 | def test_singleton_multiple_arguments(runner: CLIRunner): 462 | parser = ArgParser() 463 | 464 | class Config(BaseModel): 465 | a: str 466 | b: int 467 | 468 | with pytest.raises(AssertionError): 469 | 470 | @parser.command(singleton=True) 471 | def empty(cfg: Config, seed: int): 472 | print(cfg, seed) 473 | 474 | 475 | def test_singleton_without_model(runner: CLIRunner): 476 | parser = ArgParser() 477 | 478 | with pytest.raises(AssertionError): 479 | 480 | @parser.command(singleton=True) 481 | def empty(seed: int): 482 | print(seed) 483 | 484 | 485 | def test_singleton_command(runner: CLIRunner, capsys: CaptureFixture): 486 | parser = ArgParser() 487 | 488 | class Config(BaseModel): 489 | a: str = ArgField("--foo", "-f", description="A string", min_length=10) 490 | b: int = ArgField("--bar", "-b", description="An integer", gt=43) 491 | 492 | @parser.command(singleton=True) 493 | def empty(cfg: Config): 494 | print(f"{cfg.a} {cfg.b}") 495 | 496 | runner.invoke(parser, ["--a=short", "--b=42"]) 497 | output = capsys.readouterr() 498 | LOG.debug(output) 499 | assert "a: String should have at least 10 characters" in output.err.rstrip() 500 | assert "b: Input should be greater than 43" in output.err.rstrip() 501 | assert output.out.rstrip() == "" 502 | 503 | runner.invoke(parser, ["--foo=verylongname", "--bar=44"]) 504 | output = capsys.readouterr() 505 | stripped = output.out.rstrip() 506 | LOG.debug(stripped) 507 | assert output.err.rstrip() == "" 508 | assert stripped == "verylongname 44" 509 | -------------------------------------------------------------------------------- /tests/test_conversion.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from collections import deque 3 | from typing import Deque, Dict, FrozenSet, List, Literal, Sequence, Set, Tuple 4 | 5 | import pytest 6 | from pytest import CaptureFixture 7 | 8 | from argdantic import ArgParser 9 | from argdantic.parsing.actions import ( 10 | AppendAction, 11 | StoreAction, 12 | StoreFalseAction, 13 | StoreTrueAction, 14 | ) 15 | from argdantic.testing import CLIRunner 16 | 17 | LOG = logging.getLogger(__name__) 18 | 19 | 20 | def test_store_action_init(): 21 | action = StoreAction(["--a"], dest="a", metavar="A", help="help") 22 | assert action.option_strings == ["--a"] 23 | assert action.dest == "a" 24 | assert action.nargs is None 25 | assert action.const is None 26 | assert action.default is None 27 | assert action.type is None 28 | assert action.choices is None 29 | assert action.required is False 30 | assert action.metavar == "A" 31 | assert action.help == "help" 32 | 33 | 34 | def test_store_true_action_init(): 35 | action = StoreTrueAction(["--a"], dest="a", metavar="A", help="help") 36 | assert action.option_strings == ["--a"] 37 | assert action.dest == "a" 38 | assert action.nargs == 0 39 | assert action.const is True 40 | assert action.type is None 41 | assert action.choices is None 42 | assert action.required is False 43 | assert action.metavar == "A" 44 | assert action.help == "help" 45 | 46 | 47 | def test_store_false_action_init(): 48 | action = StoreFalseAction(["--a"], dest="a", metavar="A", help="help") 49 | assert action.option_strings == ["--a"] 50 | assert action.dest == "a" 51 | assert action.nargs == 0 52 | assert action.const is False 53 | assert action.type is None 54 | assert action.choices is None 55 | 
assert action.required is False 56 | assert action.metavar == "A" 57 | assert action.help == "help" 58 | 59 | 60 | def test_append_action_exceptions(): 61 | with pytest.raises(ValueError): 62 | AppendAction(["--a"], dest="a", nargs=0) 63 | with pytest.raises(ValueError): 64 | AppendAction(["--a"], dest="a", nargs="+", const=1) 65 | 66 | 67 | def test_primitives(runner: CLIRunner, capsys: CaptureFixture): 68 | parser = ArgParser() 69 | 70 | @parser.command() 71 | def primitives( 72 | a: int = 1, 73 | b: float = 1.0, 74 | c: str = "hello", 75 | d: bool = True, 76 | e: bytes = b"hello", 77 | ): 78 | print(a, b, c, d, e) 79 | 80 | runner.invoke(parser, []) 81 | output = capsys.readouterr() 82 | LOG.debug(output) 83 | assert output.err.rstrip() == "" 84 | assert output.out.rstrip() == "1 1.0 hello True b'hello'" 85 | 86 | 87 | def test_sequences(runner: CLIRunner, capsys: CaptureFixture): 88 | parser = ArgParser() 89 | 90 | @parser.command() 91 | def sequences( 92 | a: list = [], 93 | b: List[int] = [1, 2, 3], 94 | c: tuple = (1, 2, 3), 95 | d: Tuple[int, float, str, bool] = (1, 1.0, "hello", True), 96 | e: set = {"a"}, 97 | f: Set[bytes] = {b"a"}, 98 | g: FrozenSet[int] = frozenset({1, 2, 3}), 99 | h: Sequence[int] = [1, 2, 3], 100 | i: Deque[int] = deque([1, 2, 3]), 101 | ): 102 | return a, b, c, d, e, f, g, h, i 103 | 104 | result = runner.invoke(parser, []) 105 | assert result.return_value == ( 106 | [], 107 | [1, 2, 3], 108 | (1, 2, 3), 109 | (1, 1.0, "hello", True), 110 | {"a"}, 111 | {b"a"}, 112 | frozenset({1, 2, 3}), 113 | [1, 2, 3], 114 | deque([1, 2, 3]), 115 | ) 116 | 117 | 118 | def test_mappings(runner: CLIRunner, capsys: CaptureFixture): 119 | parser = ArgParser() 120 | runner = CLIRunner(catch_exceptions=False) 121 | 122 | @parser.command() 123 | def mappings( 124 | a: dict = {}, 125 | b: Dict[str, float] = {"a": 1.0, "b": 2.0}, 126 | c: Dict[str, List[Set[int]]] = {"a": [{1, 2}, {3, 4}]}, 127 | ): 128 | print(a, b, c) 129 | 130 | runner.invoke(parser, []) 131 | output = capsys.readouterr() 132 | LOG.debug(output) 133 | assert output.out.rstrip() == "{} {'a': 1.0, 'b': 2.0} {'a': [{1, 2}, {3, 4}]}" 134 | 135 | 136 | def test_enums(runner: CLIRunner, capsys: CaptureFixture): 137 | from enum import Enum 138 | 139 | class Color(Enum): 140 | RED = 1 141 | GREEN = 2 142 | BLUE = 3 143 | 144 | parser = ArgParser() 145 | 146 | @parser.command() 147 | def enums(a: Literal["yellow", "purple"] = "yellow", b: Color = Color.RED): 148 | print(a, b) 149 | 150 | result = runner.invoke(parser, ["--a=purple", "--b=GREEN"]) 151 | output = capsys.readouterr() 152 | LOG.debug(output) 153 | assert result.exception is None 154 | assert output.out.rstrip() == "purple Color.GREEN" 155 | 156 | 157 | def test_primitives_help(runner: CLIRunner, capsys: CaptureFixture): 158 | parser = ArgParser() 159 | 160 | @parser.command() 161 | def primitives( 162 | a: int = 1, 163 | b: float = 1.0, 164 | c: str = "hello", 165 | d: bool = True, 166 | e: bytes = b"hello", 167 | ): 168 | print(a, b, c, d, e) 169 | 170 | runner.invoke(parser, ["--help"]) 171 | output = capsys.readouterr() 172 | LOG.debug(output.out) 173 | stripped_out = output.out.rstrip() 174 | assert "usage" in stripped_out 175 | assert "--a INT" in stripped_out 176 | assert "--b FLOAT" in stripped_out 177 | assert "--c TEXT" in stripped_out 178 | assert "--d" in stripped_out 179 | assert "--no-d" in stripped_out 180 | assert "--e BYTES" in stripped_out 181 | assert "-h, --help" in stripped_out 182 | assert "show this help message and exit" in 
stripped_out 183 | 184 | 185 | def test_sequences_help(runner: CLIRunner, capsys: CaptureFixture): 186 | parser = ArgParser() 187 | 188 | @parser.command() 189 | def sequences( 190 | a: list = [], 191 | b: List[int] = [1, 2, 3], 192 | c: tuple = (1, 2, 3), 193 | d: Tuple[int, float, str, bool] = (1, 1.0, "hello", True), 194 | e: set = {"a"}, 195 | f: Set[bytes] = {b"a"}, 196 | g: FrozenSet[int] = {1, 2, 3}, 197 | h: Sequence[int] = [1, 2, 3], 198 | i: Deque[int] = [1, 2, 3], 199 | ): 200 | print(a, b, c, d, e, f, g, h, i) 201 | 202 | runner.invoke(parser, ["--help"]) 203 | output = capsys.readouterr() 204 | LOG.debug(output.out) 205 | stripped_out = output.out.rstrip() 206 | assert "usage" in stripped_out 207 | assert "--a" in stripped_out 208 | assert "--b" in stripped_out 209 | assert "--c" in stripped_out 210 | assert "--d INT FLOAT TEXT BOOL" in stripped_out 211 | assert "--e" in stripped_out 212 | assert "--f" in stripped_out 213 | assert "--g" in stripped_out 214 | assert "--h" in stripped_out 215 | assert "--i" in stripped_out 216 | assert "-h, --help" in stripped_out 217 | assert "show this help message and exit" in stripped_out 218 | 219 | 220 | def test_mappings_help(runner: CLIRunner, capsys: CaptureFixture): 221 | parser = ArgParser() 222 | 223 | @parser.command() 224 | def mappings( 225 | a: dict = {}, 226 | b: Dict[str, float] = {"a": 1.0, "b": 2.0}, 227 | c: Dict[str, List[Set[int]]] = {"a": [{1, 2}, {3, 4}]}, 228 | ): 229 | print(a, b, c) 230 | 231 | runner.invoke(parser, ["--help"]) 232 | output = capsys.readouterr() 233 | LOG.debug(output.out) 234 | stripped_out = output.out.rstrip() 235 | assert "usage" in stripped_out 236 | assert "--a JSON" in stripped_out 237 | assert "--b JSON" in stripped_out 238 | assert "--c JSON" in stripped_out 239 | assert "-h, --help" in stripped_out 240 | assert "show this help message and exit" in stripped_out 241 | 242 | 243 | def test_enums_help(runner: CLIRunner, capsys: CaptureFixture): 244 | from enum import Enum 245 | 246 | class Color(Enum): 247 | RED = 1 248 | GREEN = 2 249 | BLUE = 3 250 | 251 | parser = ArgParser() 252 | 253 | @parser.command() 254 | def enums(a: Literal["yellow", "purple"] = "yellow", b: Color = Color.RED): 255 | print(a, b) 256 | 257 | runner.invoke(parser, ["--help"]) 258 | output = capsys.readouterr() 259 | LOG.debug(output.out) 260 | stripped_out = output.out.rstrip() 261 | assert "usage" in stripped_out 262 | assert "--a [yellow|purple]" in stripped_out 263 | assert "--b [RED|GREEN|BLUE]" in stripped_out 264 | assert "-h, --help" in stripped_out 265 | assert "show this help message and exit" in stripped_out 266 | -------------------------------------------------------------------------------- /tests/test_sources/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/edornd/argdantic/9e9d7340d4789ba2617baf323504b8fd14396a99/tests/test_sources/__init__.py -------------------------------------------------------------------------------- /tests/test_sources/test_json.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | from pathlib import Path 4 | from typing import Any, Dict 5 | 6 | import mock 7 | import pytest 8 | from pydantic import BaseModel 9 | from pytest import CaptureFixture 10 | 11 | from argdantic.sources.base import FileSettingsSourceBuilder 12 | from argdantic.testing import CLIRunner 13 | 14 | LOG = logging.getLogger(__name__) 15 | 16 | 17 | def 
create_json_file(data: Dict[str, Any], path: Path) -> Path: 18 | path.write_text(json.dumps(data)) 19 | return path 20 | 21 | 22 | class TestConfig(BaseModel): 23 | __test__ = False 24 | foo: str 25 | bar: int 26 | 27 | 28 | def test_json_no_import_error(tmp_path: Path) -> None: 29 | from argdantic.sources.json import JsonSettingsSource 30 | 31 | with mock.patch.dict("sys.modules", {"orjson": None}): 32 | path = create_json_file({"foo": "baz", "bar": 42}, tmp_path / "settings.json") 33 | source_spawner = JsonSettingsSource(path) 34 | assert isinstance(source_spawner, FileSettingsSourceBuilder) 35 | assert source_spawner(TestConfig)() == {"foo": "baz", "bar": 42} 36 | assert repr(source_spawner) == f"" 37 | 38 | 39 | def test_json_source(tmp_path: Path) -> None: 40 | from argdantic.sources.json import JsonSettingsSource 41 | 42 | path = create_json_file({"foo": "baz", "bar": 42}, tmp_path / "settings.json") 43 | source_spawner = JsonSettingsSource(path) 44 | assert isinstance(source_spawner, FileSettingsSourceBuilder) 45 | assert source_spawner(TestConfig)() == {"foo": "baz", "bar": 42} 46 | assert repr(source_spawner) == f"" 47 | 48 | 49 | def test_parser_using_json_source(tmp_path: Path, runner: CLIRunner) -> None: 50 | from argdantic import ArgParser 51 | from argdantic.sources.json import JsonSettingsSource 52 | 53 | path = create_json_file({"foo": "baz", "bar": 42}, tmp_path / "settings.json") 54 | parser = ArgParser() 55 | 56 | @parser.command(sources=[JsonSettingsSource(path)]) 57 | def main(foo: str = None, bar: int = None) -> None: 58 | return foo, bar 59 | 60 | result = runner.invoke(parser, []) 61 | assert result.exception is None 62 | assert result.return_value == ("baz", 42) 63 | 64 | 65 | def test_dynamic_source_validations(tmp_path: Path, runner: CLIRunner, capsys: CaptureFixture) -> None: 66 | from argdantic.sources import JsonFileLoader, from_file 67 | 68 | # non pydantic model 69 | with pytest.raises(TypeError): 70 | 71 | @from_file(loader=JsonFileLoader, required=False) 72 | class TestModel: 73 | foo: str = "default" 74 | bar: int = 0 75 | 76 | # specified field not in given 77 | with pytest.raises(ValueError): 78 | 79 | @from_file(loader=JsonFileLoader, required=False, use_field="baz") 80 | class TestModel(BaseModel): 81 | foo: str = "default" 82 | bar: int = 0 83 | 84 | # specified field not a string or Path 85 | with pytest.raises(ValueError): 86 | 87 | @from_file(loader=JsonFileLoader, required=False, use_field="bar") 88 | class TestModel(BaseModel): 89 | foo: str = "default" 90 | bar: int = 0 91 | 92 | 93 | def test_dynamic_source_repr(tmp_path: Path, runner: CLIRunner, capsys: CaptureFixture) -> None: 94 | from pydantic_settings.sources import InitSettingsSource 95 | 96 | from argdantic import ArgParser 97 | from argdantic.sources import JsonFileLoader, from_file 98 | 99 | parser = ArgParser() 100 | 101 | @from_file(loader=JsonFileLoader, required=False) 102 | class TestModel(BaseModel): 103 | foo: str = "default" 104 | bar: int = 0 105 | 106 | @parser.command() 107 | def main(model: TestModel) -> None: 108 | return model.model_dump() 109 | 110 | sources = TestModel.settings_customise_sources( 111 | settings_cls=TestModel, 112 | init_settings=InitSettingsSource(settings_cls=TestModel, init_kwargs={"foo": "baz", "bar": 42}), 113 | env_settings=None, 114 | dotenv_settings=None, 115 | file_secret_settings=None, 116 | ) 117 | assert str(sources[0]) == "DynamicFileSource(source=None)" 118 | 119 | 120 | def test_dynamic_json_source_non_required(tmp_path: Path, runner: 
CLIRunner, capsys: CaptureFixture) -> None: 121 | from argdantic import ArgParser 122 | from argdantic.sources import JsonFileLoader, from_file 123 | 124 | parser = ArgParser() 125 | 126 | @from_file(loader=JsonFileLoader, required=False) 127 | class TestModel(BaseModel): 128 | foo: str = "default" 129 | bar: int = 0 130 | 131 | @parser.command() 132 | def main(model: TestModel = TestModel()) -> None: 133 | return model.model_dump() 134 | 135 | # check if the cli requires the model argument 136 | result = runner.invoke(parser, []) 137 | 138 | assert result.exception is None 139 | assert result.return_value == {"foo": "default", "bar": 0} 140 | 141 | 142 | def test_dynamic_json_source(tmp_path: Path, runner: CLIRunner, capsys: CaptureFixture) -> None: 143 | from argdantic import ArgParser 144 | from argdantic.sources import JsonFileLoader, from_file 145 | 146 | path = create_json_file({"foo": "baz", "bar": 42}, tmp_path / "settings.json") 147 | parser = ArgParser() 148 | 149 | @from_file(loader=JsonFileLoader) 150 | class TestModel(BaseModel): 151 | foo: str = "default" 152 | bar: int = 0 153 | 154 | @parser.command() 155 | def main(model: TestModel) -> None: 156 | return model.model_dump() 157 | 158 | # check if the cli requires the model argument 159 | result = runner.invoke(parser, []) 160 | output = capsys.readouterr() 161 | 162 | assert result.exception is None 163 | assert not output.out 164 | assert "error: the following arguments are required: --model" in output.err.rstrip() 165 | 166 | # check if the help message contains 'model', together with 'model.foo' and 'model.bar' 167 | result = runner.invoke(parser, ["--help"]) 168 | output = capsys.readouterr() 169 | assert "--model " in output.out 170 | assert "--model.foo" in output.out 171 | assert "--model.bar" in output.out 172 | 173 | # check that the model is populated with the values from the JSON file 174 | result = runner.invoke(parser, ["--model", str(path)]) 175 | assert result.exception is None 176 | assert result.return_value == {"foo": "baz", "bar": 42} 177 | 178 | # check that the CLI argument overrides the JSON file 179 | result = runner.invoke(parser, ["--model", str(path), "--model.foo", "overridden"]) 180 | assert result.exception is None 181 | assert result.return_value == {"foo": "overridden", "bar": 42} 182 | -------------------------------------------------------------------------------- /tests/test_sources/test_misc.py: -------------------------------------------------------------------------------- 1 | import platform 2 | from pathlib import Path 3 | from typing import Optional, Union 4 | 5 | import mock 6 | 7 | from argdantic import ArgParser 8 | from argdantic.sources.base import EnvSettingsSource, SecretsSettingsSource 9 | from argdantic.testing import CLIRunner 10 | 11 | 12 | def test_env_settings_source(runner: CLIRunner) -> None: 13 | source_spawner = EnvSettingsSource(env_file=".env", env_file_encoding="utf-8", env_prefix="ARGDANTIC_") 14 | assert "EnvSettingsSource" in repr(source_spawner) 15 | assert isinstance(source_spawner, EnvSettingsSource) 16 | 17 | parser = ArgParser() 18 | 19 | @parser.command(sources=[source_spawner]) 20 | def main(foo: str = None, bar: int = None) -> None: 21 | return foo, bar 22 | 23 | with mock.patch.dict("os.environ", {"ARGDANTIC_FOO": "baz", "ARGDANTIC_BAR": "42"}): 24 | result = runner.invoke(parser, []) 25 | assert result.exception is None 26 | assert result.return_value == ("baz", 42) 27 | 28 | 29 | def test_env_settings_source_case_sensitive(runner: CLIRunner) -> None: 
30 | source_spawner = EnvSettingsSource( 31 | env_file=".env", 32 | env_file_encoding="utf-8", 33 | env_case_sensitive=True, 34 | ) 35 | assert "EnvSettingsSource" in repr(source_spawner) 36 | assert isinstance(source_spawner, EnvSettingsSource) 37 | 38 | parser = ArgParser() 39 | 40 | @parser.command(sources=[source_spawner]) 41 | def main(foo: Optional[str] = None, bar: Optional[int] = None) -> None: 42 | return foo, bar 43 | 44 | with mock.patch.dict("os.environ", {"foo": "baz", "bar": "42"}): 45 | result = runner.invoke(parser, []) 46 | assert result.exception is None 47 | assert result.return_value == ("baz", 42) 48 | 49 | with mock.patch.dict("os.environ", {"FOO": "baz", "bar": "42"}): 50 | result = runner.invoke(parser, []) 51 | assert result.exception is None 52 | if platform.system() == "Windows": 53 | # windows is not case sensitive 54 | assert result.return_value == ("baz", 42) 55 | else: 56 | assert result.return_value == (None, 42) 57 | 58 | 59 | def test_secrets_setting_source(runner: CLIRunner, tmp_path: Path) -> None: 60 | source = SecretsSettingsSource(secrets_dir=tmp_path) 61 | assert "SecretsSettingsSource" in repr(source) 62 | assert isinstance(source, SecretsSettingsSource) 63 | 64 | parser = ArgParser() 65 | 66 | @parser.command(sources=[source]) 67 | def main(foo: Union[str, None] = None, bar: Union[str, None] = None) -> None: 68 | return foo, bar 69 | 70 | # just check that it still runs properly 71 | result = runner.invoke(parser, []) 72 | assert result.exception is None 73 | assert result.return_value == (None, None) 74 | -------------------------------------------------------------------------------- /tests/test_sources/test_toml.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import Any, Dict 3 | 4 | import mock 5 | import pytest 6 | from pydantic import BaseModel 7 | 8 | from argdantic.sources.base import FileSettingsSourceBuilder 9 | from argdantic.testing import CLIRunner 10 | 11 | 12 | def create_toml_file(data: Dict[str, Any], path: Path) -> Path: 13 | import tomli_w 14 | 15 | path.write_text(tomli_w.dumps(data)) 16 | return path 17 | 18 | 19 | class TestConfig(BaseModel): 20 | __test__ = False 21 | foo: str 22 | bar: int 23 | 24 | 25 | def test_toml_import_error(tmp_path: Path) -> None: 26 | from argdantic.sources.toml import TomlSettingsSource 27 | 28 | with mock.patch.dict("sys.modules", {"tomli": None}): 29 | path = create_toml_file({"foo": "baz", "bar": 42}, tmp_path / "settings.toml") 30 | source_spawner = TomlSettingsSource(path) 31 | assert repr(source_spawner) == f"" 32 | assert isinstance(source_spawner, FileSettingsSourceBuilder) 33 | with pytest.raises(ImportError): 34 | source_spawner(TestConfig)() 35 | 36 | 37 | def test_toml_source(tmp_path: Path) -> None: 38 | from argdantic.sources.toml import TomlSettingsSource 39 | 40 | path = create_toml_file({"foo": "baz", "bar": 42}, tmp_path / "settings.toml") 41 | source_spawner = TomlSettingsSource(path) 42 | assert repr(source_spawner) == f"" 43 | assert isinstance(source_spawner, FileSettingsSourceBuilder) 44 | assert source_spawner(TestConfig)() == {"foo": "baz", "bar": 42} 45 | 46 | 47 | def test_parser_using_toml_source(tmp_path: Path, runner: CLIRunner) -> None: 48 | from argdantic import ArgParser 49 | from argdantic.sources import TomlSettingsSource 50 | 51 | path = create_toml_file({"foo": "baz", "bar": 42}, tmp_path / "settings.toml") 52 | parser = ArgParser() 53 | 54 | 
@parser.command(sources=[TomlSettingsSource(path)]) 55 | def main(foo: str = None, bar: int = None) -> None: 56 | return foo, bar 57 | 58 | result = runner.invoke(parser, []) 59 | assert result.exception is None 60 | assert result.return_value == ("baz", 42) 61 | 62 | 63 | def test_dynamic_toml_source(tmp_path: Path, runner: CLIRunner, capsys: pytest.CaptureFixture) -> None: 64 | from argdantic import ArgParser 65 | from argdantic.sources import TomlFileLoader, from_file 66 | 67 | path = create_toml_file({"foo": "baz", "bar": 42}, tmp_path / "settings.toml") 68 | parser = ArgParser() 69 | 70 | @from_file(loader=TomlFileLoader) 71 | class TestModel(BaseModel): 72 | foo: str = "default" 73 | bar: int = 0 74 | 75 | @parser.command() 76 | def main(model: TestModel) -> None: 77 | return model.model_dump() 78 | 79 | # check if the cli requires the model argument 80 | result = runner.invoke(parser, []) 81 | output = capsys.readouterr() 82 | 83 | assert result.exception is None 84 | assert not output.out 85 | assert "error: the following arguments are required: --model" in output.err.rstrip() 86 | 87 | # check if the help message contains 'model', together with 'model.foo' and 'model.bar' 88 | result = runner.invoke(parser, ["--help"]) 89 | output = capsys.readouterr() 90 | assert "--model " in output.out 91 | assert "--model.foo" in output.out 92 | assert "--model.bar" in output.out 93 | 94 | # check that the model is populated with the values from the JSON file 95 | result = runner.invoke(parser, ["--model", str(path)]) 96 | assert result.exception is None 97 | assert result.return_value == {"foo": "baz", "bar": 42} 98 | 99 | # check that the CLI argument overrides the JSON file 100 | result = runner.invoke(parser, ["--model", str(path), "--model.foo", "overridden"]) 101 | assert result.exception is None 102 | assert result.return_value == {"foo": "overridden", "bar": 42} 103 | -------------------------------------------------------------------------------- /tests/test_sources/test_yaml.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import Any, Dict 3 | 4 | import mock 5 | import pytest 6 | from pydantic import BaseModel 7 | 8 | from argdantic.sources.base import FileSettingsSourceBuilder 9 | from argdantic.testing import CLIRunner 10 | 11 | 12 | def create_yaml_file(data: Dict[str, Any], path: Path) -> Path: 13 | import yaml 14 | 15 | path.write_text(yaml.dump(data)) 16 | return path 17 | 18 | 19 | class TestConfig(BaseModel): 20 | __test__ = False 21 | foo: str 22 | bar: int 23 | 24 | 25 | def test_yaml_import_error(tmp_path: Path) -> None: 26 | from argdantic.sources.yaml import YamlSettingsSource 27 | 28 | path = create_yaml_file({"foo": "baz", "bar": 42}, tmp_path / "settings.yaml") 29 | with mock.patch.dict("sys.modules", {"yaml": None}): 30 | source_spawner = YamlSettingsSource(path) 31 | assert repr(source_spawner) == f"" 32 | assert isinstance(source_spawner, FileSettingsSourceBuilder) 33 | with pytest.raises(ImportError): 34 | source_spawner(TestConfig)() 35 | 36 | 37 | def test_yaml_source(tmp_path: Path) -> None: 38 | from argdantic.sources.yaml import YamlSettingsSource 39 | 40 | path = create_yaml_file({"foo": "baz", "bar": 42}, tmp_path / "settings.yaml") 41 | source = YamlSettingsSource(path) 42 | assert repr(source) == f"" 43 | assert isinstance(source, FileSettingsSourceBuilder) 44 | assert source(TestConfig)() == {"foo": "baz", "bar": 42} 45 | 46 | 47 | def 
test_parser_using_yaml_source(tmp_path: Path, runner: CLIRunner) -> None: 48 | from argdantic import ArgParser 49 | from argdantic.sources.yaml import YamlSettingsSource 50 | 51 | path = create_yaml_file({"foo": "baz", "bar": 42}, tmp_path / "settings.yaml") 52 | parser = ArgParser() 53 | 54 | @parser.command(sources=[YamlSettingsSource(path)]) 55 | def main(foo: str = None, bar: int = None) -> None: 56 | return foo, bar 57 | 58 | result = runner.invoke(parser, []) 59 | assert result.exception is None 60 | assert result.return_value == ("baz", 42) 61 | 62 | 63 | def test_dynamic_yaml_source(tmp_path: Path, runner: CLIRunner, capsys: pytest.CaptureFixture) -> None: 64 | from argdantic import ArgParser 65 | from argdantic.sources import YamlFileLoader, from_file 66 | 67 | path = create_yaml_file({"foo": "baz", "bar": 42}, tmp_path / "settings.yaml") 68 | parser = ArgParser() 69 | 70 | @from_file(loader=YamlFileLoader) 71 | class TestModel(BaseModel): 72 | foo: str = "default" 73 | bar: int = 0 74 | 75 | @parser.command() 76 | def main(model: TestModel) -> None: 77 | return model.model_dump() 78 | 79 | # check if the cli requires the model argument 80 | result = runner.invoke(parser, []) 81 | output = capsys.readouterr() 82 | 83 | assert result.exception is None 84 | assert not output.out 85 | assert "error: the following arguments are required: --model" in output.err.rstrip() 86 | 87 | # check if the help message contains 'model', together with 'model.foo' and 'model.bar' 88 | result = runner.invoke(parser, ["--help"]) 89 | output = capsys.readouterr() 90 | assert "--model " in output.out 91 | assert "--model.foo" in output.out 92 | assert "--model.bar" in output.out 93 | 94 | # check that the model is populated with the values from the JSON file 95 | result = runner.invoke(parser, ["--model", str(path)]) 96 | assert result.exception is None 97 | assert result.return_value == {"foo": "baz", "bar": 42} 98 | 99 | # check that the CLI argument overrides the JSON file 100 | result = runner.invoke(parser, ["--model", str(path), "--model.foo", "overridden"]) 101 | assert result.exception is None 102 | assert result.return_value == {"foo": "overridden", "bar": 42} 103 | 104 | 105 | def test_dynamic_yaml_source_nested_models(tmp_path: Path, runner: CLIRunner, capsys: pytest.CaptureFixture) -> None: 106 | from argdantic import ArgParser 107 | from argdantic.sources import YamlFileLoader, from_file 108 | 109 | path = create_yaml_file( 110 | { 111 | "foo": "baz", 112 | "bar": 42, 113 | "nested": {"foo": "nested_baz", "bar": 24}, 114 | }, 115 | tmp_path / "settings.yaml", 116 | ) 117 | parser = ArgParser() 118 | 119 | class NestedModel(BaseModel): 120 | foo: str = "default" 121 | bar: int = 0 122 | 123 | @from_file(loader=YamlFileLoader) 124 | class TestModel(BaseModel): 125 | foo: str = "default" 126 | bar: int = 0 127 | nested: NestedModel = NestedModel() 128 | 129 | @parser.command() 130 | def main(model: TestModel) -> None: 131 | return model.model_dump() 132 | 133 | # check that the model is populated with the values from the JSON file 134 | result = runner.invoke(parser, ["--model", str(path)]) 135 | assert result.exception is None 136 | assert result.return_value == {"foo": "baz", "bar": 42, "nested": {"foo": "nested_baz", "bar": 24}} 137 | 138 | # check that the CLI argument overrides the JSON file 139 | result = runner.invoke(parser, ["--model", str(path), "--model.nested.foo", "overridden"]) 140 | assert result.exception is None 141 | assert result.return_value == {"foo": "baz", "bar": 42, 
"nested": {"foo": "overridden", "bar": 24}} 142 | -------------------------------------------------------------------------------- /tests/test_stores/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/edornd/argdantic/9e9d7340d4789ba2617baf323504b8fd14396a99/tests/test_stores/__init__.py -------------------------------------------------------------------------------- /tests/test_stores/test_json.py: -------------------------------------------------------------------------------- 1 | import json 2 | from pathlib import Path 3 | from typing import Any, Dict, List, Set 4 | 5 | from pydantic_settings import BaseSettings 6 | 7 | from argdantic.stores import JsonSettingsStore 8 | from argdantic.testing import CLIRunner 9 | 10 | 11 | def test_json_store_repr(tmp_path: Path) -> None: 12 | path = tmp_path / "settings.json" 13 | store = JsonSettingsStore(path) 14 | assert store.path == path 15 | assert repr(store) == f"" 16 | 17 | 18 | def test_json_store_call(tmp_path: Path) -> None: 19 | path = tmp_path / "settings.json" 20 | store = JsonSettingsStore(path) 21 | assert store.path == path 22 | 23 | class Settings(BaseSettings): 24 | foo: str = "baz" 25 | bar: int = 42 26 | 27 | store(Settings()) 28 | assert "".join(path.read_text().split(" ")) == '{"foo":"baz","bar":42}' 29 | 30 | class Settings(BaseSettings): 31 | foo: str = "baz" 32 | bar: int = 42 33 | baz: List[str] = ["foo", "bar"] 34 | 35 | store(Settings()) 36 | assert "".join(path.read_text().split(" ")) == '{"foo":"baz","bar":42,"baz":["foo","bar"]}' 37 | 38 | class Settings(BaseSettings): 39 | foo: str = "baz" 40 | bar: int = 42 41 | baz: List[str] = ["foo", "bar"] 42 | qux: Set[str] = {"foo", "bar"} 43 | 44 | store(Settings()) 45 | text = path.read_text() 46 | data = json.loads(text) 47 | assert data["foo"] == "baz" 48 | assert data["bar"] == 42 49 | assert all(item in data["baz"] for item in ["foo", "bar"]) 50 | assert all(item in data["qux"] for item in ["foo", "bar"]) 51 | 52 | class Settings(BaseSettings): 53 | foo: str = "baz" 54 | bar: int = 42 55 | baz: List[str] = ["foo", "bar"] 56 | qux: Set[str] = {"foo", "bar"} 57 | quux: Dict[str, Any] = {"foo": "bar"} 58 | 59 | store(Settings()) 60 | text = path.read_text() 61 | data = json.loads(text) 62 | assert data["foo"] == "baz" 63 | assert data["bar"] == 42 64 | assert all(item in data["baz"] for item in ["foo", "bar"]) 65 | assert all(item in data["qux"] for item in ["foo", "bar"]) 66 | assert data["quux"] == {"foo": "bar"} 67 | 68 | 69 | def test_parser_using_json_store(tmp_path: Path, runner: CLIRunner) -> None: 70 | from argdantic import ArgParser 71 | 72 | cli = ArgParser() 73 | path = tmp_path / "settings.json" 74 | 75 | @cli.command(stores=[JsonSettingsStore(path)]) 76 | def main(foo: str = "baz", bar: int = 42) -> None: 77 | return foo, bar 78 | 79 | result = runner.invoke(cli, []) 80 | assert result.exception is None 81 | assert result.return_value == ("baz", 42) 82 | 83 | 84 | def test_parser_using_json_store_complex_data(tmp_path: Path, runner: CLIRunner) -> None: 85 | from pathlib import Path 86 | 87 | from argdantic import ArgParser 88 | 89 | cli = ArgParser() 90 | path = tmp_path / "settings.json" 91 | 92 | @cli.command(stores=[JsonSettingsStore(path)]) 93 | def main(foo: Path = "baz", bar: int = 42) -> None: 94 | return foo, bar 95 | 96 | result = runner.invoke(cli, []) 97 | assert result.exception is None 98 | assert result.return_value == ("baz", 42) 99 | 
-------------------------------------------------------------------------------- /tests/test_stores/test_toml.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import mock 4 | import pytest 5 | from pydantic_settings import BaseSettings 6 | 7 | from argdantic.testing import CLIRunner 8 | 9 | 10 | def test_toml_store_repr(tmp_path: Path) -> None: 11 | from argdantic.stores.toml import TomlSettingsStore 12 | 13 | path = tmp_path / "settings.toml" 14 | store = TomlSettingsStore(path) 15 | assert store.path == path 16 | assert repr(store) == f"" 17 | 18 | 19 | def test_toml_store_import_error(tmp_path: Path) -> None: 20 | from argdantic.stores.toml import TomlSettingsStore 21 | 22 | class Settings(BaseSettings): 23 | foo: str = "baz" 24 | bar: int = 42 25 | 26 | with mock.patch.dict("sys.modules", {"toml": None}): 27 | store = TomlSettingsStore(tmp_path / "settings.toml") 28 | assert repr(store) == f"" 29 | with pytest.raises(ImportError): 30 | store(Settings()) 31 | 32 | 33 | def test_toml_store_call(tmp_path: Path) -> None: 34 | from argdantic.stores.toml import TomlSettingsStore 35 | 36 | path = tmp_path / "settings.toml" 37 | store = TomlSettingsStore(path) 38 | assert store.path == path 39 | 40 | class Settings(BaseSettings): 41 | foo: str = "baz" 42 | bar: int = 42 43 | 44 | store(Settings()) 45 | # read data, remove trailing newline and trailing whitespace 46 | data = path.read_text().strip().replace("\n", " ") 47 | assert data == 'foo = "baz" bar = 42' 48 | 49 | 50 | def test_parser_using_toml_store(tmp_path: Path, runner: CLIRunner) -> None: 51 | from argdantic import ArgParser 52 | from argdantic.stores.toml import TomlSettingsStore 53 | 54 | path = tmp_path / "settings.toml" 55 | parser = ArgParser() 56 | 57 | @parser.command(stores=[TomlSettingsStore(path)]) 58 | def main(foo: str = "baz", bar: int = 42) -> None: 59 | return foo, bar 60 | 61 | result = runner.invoke(parser, []) 62 | assert result.exception is None 63 | assert result.return_value == ("baz", 42) 64 | 65 | 66 | def test_parser_using_toml_store_complex_data(tmp_path: Path, runner: CLIRunner) -> None: 67 | from argdantic import ArgParser 68 | from argdantic.stores.toml import TomlSettingsStore 69 | 70 | path = tmp_path / "settings.toml" 71 | parser = ArgParser() 72 | 73 | @parser.command(stores=[TomlSettingsStore(path, mode="json")]) 74 | def main(foo: Path = "baz", bar: int = 42) -> None: 75 | return foo, bar 76 | 77 | result = runner.invoke(parser, []) 78 | assert result.exception is None 79 | assert result.return_value == ("baz", 42) 80 | -------------------------------------------------------------------------------- /tests/test_stores/test_yaml.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import mock 4 | import pytest 5 | from pydantic_settings import BaseSettings 6 | 7 | from argdantic.testing import CLIRunner 8 | 9 | 10 | def test_yaml_store_import_error(tmp_path: Path) -> None: 11 | from argdantic.stores.yaml import YamlSettingsStore 12 | 13 | class Settings(BaseSettings): 14 | foo: str = "baz" 15 | bar: int = 42 16 | 17 | with mock.patch.dict("sys.modules", {"yaml": None}): 18 | store = YamlSettingsStore(tmp_path / "settings.yaml") 19 | assert repr(store) == f"" 20 | assert isinstance(store, YamlSettingsStore) 21 | with pytest.raises(Exception): 22 | data = Settings() 23 | store(data) 24 | 25 | 26 | def test_yaml_store_repr(tmp_path: Path) -> None: 27 | from 
argdantic.stores.yaml import YamlSettingsStore 28 | 29 | path = tmp_path / "settings.yaml" 30 | store = YamlSettingsStore(path) 31 | assert store.path == path 32 | assert repr(store) == f"" 33 | 34 | 35 | def test_yaml_store_call(tmp_path: Path) -> None: 36 | from argdantic.stores.yaml import YamlSettingsStore 37 | 38 | path = tmp_path / "settings.yaml" 39 | store = YamlSettingsStore(path) 40 | assert store.path == path 41 | 42 | class Settings(BaseSettings): 43 | foo: str = "baz" 44 | bar: int = 42 45 | 46 | store(Settings()) 47 | # read data, remove trailing newline and trailing whitespace 48 | data = path.read_text().strip().replace("\n", " ") 49 | assert data == "bar: 42 foo: baz" 50 | 51 | 52 | def test_parser_using_yaml_store(tmp_path: Path, runner: CLIRunner) -> None: 53 | from argdantic import ArgParser 54 | from argdantic.stores.yaml import YamlSettingsStore 55 | 56 | path = tmp_path / "settings.yaml" 57 | parser = ArgParser() 58 | 59 | @parser.command(stores=[YamlSettingsStore(path)]) 60 | def main(foo: str = "baz", bar: int = 42) -> None: 61 | return foo, bar 62 | 63 | result = runner.invoke(parser, []) 64 | assert result.exception is None 65 | assert result.return_value == ("baz", 42) 66 | 67 | result = runner.invoke(parser, ["--foo", "qux"]) 68 | assert result.exception is None 69 | assert result.return_value == ("qux", 42) 70 | assert result.return_value == ("qux", 42) 71 | 72 | 73 | def test_parser_using_yaml_store_complex_data(tmp_path: Path, runner: CLIRunner) -> None: 74 | from argdantic import ArgParser 75 | from argdantic.stores.yaml import YamlSettingsStore 76 | 77 | path = tmp_path / "settings.yaml" 78 | parser = ArgParser() 79 | 80 | @parser.command(stores=[YamlSettingsStore(path, mode="json")]) 81 | def main(foo: Path = "baz", bar: int = 42) -> None: 82 | return str(foo), bar 83 | 84 | result = runner.invoke(parser, []) 85 | assert result.exception is None 86 | assert result.return_value == ("baz", 42) 87 | 88 | result = runner.invoke(parser, ["--foo", "qux", "--bar", "24"]) 89 | assert result.exception is None 90 | print(result.return_value) 91 | assert result.return_value == ("qux", 24) 92 | -------------------------------------------------------------------------------- /tests/test_testing.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import pytest 4 | from pytest import CaptureFixture 5 | 6 | from argdantic import ArgParser 7 | from argdantic.testing import CLIRunner 8 | 9 | LOG = logging.getLogger(__name__) 10 | 11 | 12 | def test_cli_runner_no_catch_exceptions(capsys: CaptureFixture): 13 | parser = ArgParser() 14 | runner = CLIRunner(catch_exceptions=False) 15 | 16 | @parser.command() 17 | def cli_runner_no_catch_exceptions(): 18 | raise ValueError("hello") 19 | 20 | with pytest.raises(ValueError): 21 | runner.invoke(parser, []) 22 | 23 | 24 | def test_cli_runner_exception_result(capsys: CaptureFixture): 25 | parser = ArgParser() 26 | runner = CLIRunner(catch_exceptions=True) 27 | 28 | @parser.command() 29 | def cli_runner_exception_result(): 30 | raise ValueError("hello") 31 | 32 | result = runner.invoke(parser, []) 33 | assert result.exception 34 | assert result.exception.args[0] == "hello" 35 | 36 | 37 | def test_cli_runner_invoke_params(capsys: CaptureFixture): 38 | parser = ArgParser() 39 | runner = CLIRunner(catch_exceptions=True) 40 | 41 | @parser.command() 42 | def cli_runner_invoke_params(a: int, b: int): 43 | print(a, b) 44 | return a + b 45 | 46 | result = runner.invoke(parser, 
["--a", "1", "--b", "2"]) 47 | assert result.return_value == 3 48 | assert result.exception is None 49 | assert result.exc_info is None 50 | -------------------------------------------------------------------------------- /tests/test_types/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/edornd/argdantic/9e9d7340d4789ba2617baf323504b8fd14396a99/tests/test_types/__init__.py -------------------------------------------------------------------------------- /tests/test_types/test_choices.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | from enum import Enum 4 | from typing import Iterable, Literal 5 | 6 | from pytest import CaptureFixture 7 | 8 | from argdantic import ArgParser 9 | from argdantic.testing import CLIRunner 10 | 11 | LOG = logging.getLogger(__name__) 12 | 13 | 14 | def test_literal_types_required_error(runner: CLIRunner, capsys: CaptureFixture): 15 | parser = ArgParser() 16 | 17 | @parser.command() 18 | def literal_types_required_error(a: Literal["a", "b", "c"]): 19 | print(a) 20 | 21 | runner.invoke(parser, []) 22 | output = capsys.readouterr() 23 | LOG.debug(output) 24 | "error: the following arguments are required: --a" in output.err.rstrip() 25 | assert output.out.rstrip() == "" 26 | 27 | 28 | def test_literal_types_wrong_arg(runner: CLIRunner, capsys: CaptureFixture): 29 | parser = ArgParser() 30 | 31 | @parser.command() 32 | def literal_types_wrong_arg(a: Literal["a", "b", "c"]): 33 | print(a) 34 | 35 | runner.invoke(parser, ["--a", "d"]) 36 | output = capsys.readouterr() 37 | LOG.debug(output) 38 | "error: argument --a: invalid choice: 'd' (choose from 'a', 'b', 'c')" in output.err.rstrip() 39 | assert output.out.rstrip() == "" 40 | 41 | 42 | def test_choice_iterable(runner: CLIRunner, capsys: CaptureFixture): 43 | parser = ArgParser() 44 | 45 | @parser.command() 46 | def choice_iterable(a: Literal["a", "b", "c"]): 47 | print(a) 48 | 49 | choice = list(choice_iterable.arguments)[0] 50 | choice.build(argparse.ArgumentParser()) 51 | assert issubclass(choice.field_type, Enum) 52 | assert len(choice) == 3 53 | assert "a" in choice 54 | assert next(choice) == choice.field_type.a 55 | assert isinstance(iter(choice), Iterable) 56 | -------------------------------------------------------------------------------- /tests/test_types/test_forwardrefs.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import logging 4 | 5 | from pydantic import BaseModel 6 | from pytest import CaptureFixture 7 | 8 | from argdantic import ArgParser 9 | from argdantic.testing import CLIRunner 10 | 11 | LOG = logging.getLogger(__name__) 12 | 13 | # the only way this works is to define at module level 14 | # inside the function this will not work, probably because of the forward reference 15 | # see https://github.com/pydantic/pydantic/discussions/4521 for more info 16 | parser = ArgParser() 17 | 18 | 19 | class NestedModel(BaseModel): 20 | a: int 21 | b: float 22 | 23 | 24 | class SimpleModel(BaseModel): 25 | a: int 26 | b: float 27 | c: NestedModel 28 | 29 | 30 | def test_pydantic_forwardrefs(runner: CLIRunner, capsys: CaptureFixture): 31 | @parser.command() 32 | def pydantic_model_nested(model: SimpleModel): 33 | print(model.a, model.b, model.c.a, model.c.b) 34 | 35 | runner.invoke(parser, ["--model.a", "2", "--model.b", "2.0", "--model.c.a", "3", "--model.c.b", "3.0"]) 
36 | output = capsys.readouterr() 37 | LOG.debug(output) 38 | assert output.err.rstrip() == "" 39 | assert output.out.rstrip() == "2 2.0 3 3.0" 40 | -------------------------------------------------------------------------------- /tests/test_types/test_models.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from pydantic import BaseModel 4 | from pytest import CaptureFixture 5 | 6 | from argdantic import ArgParser 7 | from argdantic.testing import CLIRunner 8 | 9 | LOG = logging.getLogger(__name__) 10 | 11 | 12 | def test_pydantic_model_simple(runner: CLIRunner, capsys: CaptureFixture): 13 | parser = ArgParser() 14 | 15 | class SimpleModel(BaseModel): 16 | a: int 17 | b: float 18 | 19 | @parser.command() 20 | def pydantic_model_simple(model: SimpleModel): 21 | print(model.a, model.b) 22 | 23 | runner.invoke(parser, ["--model.a", "2", "--model.b", "2.0"]) 24 | output = capsys.readouterr() 25 | LOG.debug(output) 26 | assert output.err.rstrip() == "" 27 | assert output.out.rstrip() == "2 2.0" 28 | 29 | 30 | def test_pydantic_model_nested(runner: CLIRunner, capsys: CaptureFixture): 31 | parser = ArgParser() 32 | 33 | class NestedModel(BaseModel): 34 | a: int 35 | b: float 36 | 37 | class SimpleModel(BaseModel): 38 | a: int 39 | b: float 40 | c: NestedModel 41 | 42 | @parser.command() 43 | def pydantic_model_nested(model: SimpleModel): 44 | print(model.a, model.b, model.c.a, model.c.b) 45 | 46 | runner.invoke(parser, ["--model.a", "2", "--model.b", "2.0", "--model.c.a", "3", "--model.c.b", "3.0"]) 47 | output = capsys.readouterr() 48 | LOG.debug(output) 49 | assert output.err.rstrip() == "" 50 | assert output.out.rstrip() == "2 2.0 3 3.0" 51 | -------------------------------------------------------------------------------- /tests/test_types/test_primitives.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import Union 3 | 4 | import pytest 5 | from pytest import CaptureFixture 6 | 7 | from argdantic import ArgParser 8 | from argdantic.testing import CLIRunner 9 | 10 | LOG = logging.getLogger(__name__) 11 | 12 | 13 | def test_numerical_types_required_error(runner: CLIRunner, capsys: CaptureFixture): 14 | parser = ArgParser() 15 | 16 | @parser.command() 17 | def numerical_types_required_error(a: int, b: float): 18 | print(a, b) 19 | 20 | runner.invoke(parser, []) 21 | output = capsys.readouterr() 22 | LOG.debug(output) 23 | assert "error: the following arguments are required: --a, --b" in output.err.rstrip() 24 | assert output.out.rstrip() == "" 25 | 26 | 27 | def test_numerical_types_default_values(runner: CLIRunner, capsys: CaptureFixture): 28 | parser = ArgParser() 29 | 30 | @parser.command() 31 | def numerical_types_default_values(a: int = 1, b: float = 1.0): 32 | print(a, b) 33 | 34 | runner.invoke(parser, []) 35 | output = capsys.readouterr() 36 | LOG.debug(output) 37 | assert output.err.rstrip() == "" 38 | assert output.out.rstrip() == "1 1.0" 39 | 40 | 41 | def test_numerical_types(runner: CLIRunner, capsys: CaptureFixture): 42 | parser = ArgParser() 43 | 44 | @parser.command() 45 | def numerical_types(a: int = 1, b: float = 1.0): 46 | print(a, b) 47 | 48 | runner.invoke(parser, ["--a", "2", "--b", "2.0"]) 49 | output = capsys.readouterr() 50 | LOG.debug(output) 51 | assert output.err.rstrip() == "" 52 | assert output.out.rstrip() == "2 2.0" 53 | 54 | 55 | def test_numerical_types_help(runner: CLIRunner, capsys: CaptureFixture): 56 | parser = ArgParser() 57 | 58 |
@parser.command() 59 | def numerical_types_help(a: int = 1, b: float = 1.0): 60 | print(a, b) 61 | 62 | runner.invoke(parser, ["--help"]) 63 | output = capsys.readouterr() 64 | LOG.debug(output) 65 | assert output.err.rstrip() == "" 66 | assert "usage" in output.out.rstrip() 67 | assert "[-h] [--a INT] [--b FLOAT]" in output.out.rstrip() 68 | 69 | 70 | def test_boolean_types_required_error(runner: CLIRunner, capsys: CaptureFixture): 71 | parser = ArgParser() 72 | 73 | @parser.command() 74 | def boolean_types_required_error(a: bool, b: bool): 75 | print(a, b) 76 | 77 | runner.invoke(parser, []) 78 | output = capsys.readouterr() 79 | LOG.debug(output) 80 | assert "error: the following arguments are required: --a, --b" in output.err.rstrip() 81 | assert output.out.rstrip() == "" 82 | 83 | 84 | def test_boolean_types_default_values(runner: CLIRunner, capsys: CaptureFixture): 85 | parser = ArgParser() 86 | 87 | @parser.command() 88 | def boolean_types_default_values(a: bool = True, b: bool = False): 89 | print(a, b) 90 | 91 | runner.invoke(parser, []) 92 | output = capsys.readouterr() 93 | LOG.debug(output) 94 | assert output.err.rstrip() == "" 95 | assert output.out.rstrip() == "True False" 96 | 97 | 98 | def test_boolean_types(runner: CLIRunner, capsys: CaptureFixture): 99 | parser = ArgParser() 100 | 101 | @parser.command() 102 | def boolean_types(a: bool, b: bool): 103 | print(a, b) 104 | 105 | runner.invoke(parser, ["--no-a", "--b"]) 106 | output = capsys.readouterr() 107 | LOG.debug(output) 108 | assert output.err.rstrip() == "" 109 | assert output.out.rstrip() == "False True" 110 | 111 | 112 | def test_boolean_types_help(runner: CLIRunner, capsys: CaptureFixture): 113 | parser = ArgParser() 114 | 115 | @parser.command() 116 | def boolean_types_help(a: bool, b: bool): 117 | print(a, b) 118 | 119 | runner.invoke(parser, ["--help"]) 120 | output = capsys.readouterr() 121 | LOG.debug(output) 122 | assert output.err.rstrip() == "" 123 | assert "usage" in output.out.rstrip() 124 | assert "[-h] (--a | --no-a) (--b | --no-b)" in output.out.rstrip() 125 | 126 | 127 | def test_string_types_required_error(runner: CLIRunner, capsys: CaptureFixture): 128 | parser = ArgParser() 129 | 130 | @parser.command() 131 | def string_types_required_error(a: str, b: str): 132 | print(a, b) 133 | 134 | runner.invoke(parser, []) 135 | output = capsys.readouterr() 136 | LOG.debug(output) 137 | assert "error: the following arguments are required: --a, --b" in output.err.rstrip() 138 | assert output.out.rstrip() == "" 139 | 140 | 141 | def test_string_types_default_values(runner: CLIRunner, capsys: CaptureFixture): 142 | parser = ArgParser() 143 | 144 | @parser.command() 145 | def string_types_default_values(a: str = "a", b: str = "b"): 146 | print(a, b) 147 | 148 | runner.invoke(parser, []) 149 | output = capsys.readouterr() 150 | LOG.debug(output) 151 | assert output.err.rstrip() == "" 152 | assert output.out.rstrip() == "a b" 153 | 154 | 155 | def test_string_types(runner: CLIRunner, capsys: CaptureFixture): 156 | parser = ArgParser() 157 | 158 | @parser.command() 159 | def string_types(a: str = "a", b: str = "b"): 160 | print(a, b) 161 | 162 | runner.invoke(parser, ["--a", "aa", "--b", "bb"]) 163 | output = capsys.readouterr() 164 | LOG.debug(output) 165 | assert output.err.rstrip() == "" 166 | assert output.out.rstrip() == "aa bb" 167 | 168 | 169 | def test_string_types_help(runner: CLIRunner, capsys: CaptureFixture): 170 | parser = ArgParser() 171 | 172 | @parser.command() 173 | def string_types_help(a: str = "a", b: str =
"b"): 174 | print(a, b) 175 | 176 | runner.invoke(parser, ["--help"]) 177 | output = capsys.readouterr() 178 | LOG.debug(output) 179 | assert output.err.rstrip() == "" 180 | assert "usage:" in output.out.rstrip() 181 | assert "[-h] [--a TEXT] [--b TEXT]" in output.out.rstrip() 182 | 183 | 184 | def test_bytes_types_required_error(runner: CLIRunner, capsys: CaptureFixture): 185 | parser = ArgParser() 186 | 187 | @parser.command() 188 | def bytes_types_required_error(a: bytes, b: bytes): 189 | print(a, b) 190 | 191 | runner.invoke(parser, []) 192 | output = capsys.readouterr() 193 | LOG.debug(output) 194 | "error: the following arguments are required: --a, --b" in output.err.rstrip() 195 | assert output.out.rstrip() == "" 196 | 197 | 198 | def test_bytes_types_default_values(runner: CLIRunner, capsys: CaptureFixture): 199 | parser = ArgParser() 200 | 201 | @parser.command() 202 | def bytes_types_default_values(a: bytes = b"a", b: bytes = b"b"): 203 | print(a, b) 204 | 205 | runner.invoke(parser, []) 206 | output = capsys.readouterr() 207 | LOG.debug(output) 208 | assert output.err.rstrip() == "" 209 | assert output.out.rstrip() == "b'a' b'b'" 210 | 211 | 212 | def test_bytes_types(runner: CLIRunner, capsys: CaptureFixture): 213 | parser = ArgParser() 214 | 215 | @parser.command() 216 | def bytes_types(a: bytes = b"a", b: bytes = b"b"): 217 | print(a, b) 218 | 219 | runner.invoke(parser, ["--a", "aa", "--b", "bb"]) 220 | output = capsys.readouterr() 221 | LOG.debug(output) 222 | assert output.err.rstrip() == "" 223 | assert output.out.rstrip() == "b'aa' b'bb'" 224 | 225 | 226 | def test_bytes_types_help(runner: CLIRunner, capsys: CaptureFixture): 227 | parser = ArgParser() 228 | 229 | @parser.command() 230 | def bytes_types_help(a: bytes = b"a", b: bytes = b"b"): 231 | print(a, b) 232 | 233 | runner.invoke(parser, ["--help"]) 234 | output = capsys.readouterr() 235 | LOG.debug(output) 236 | assert output.err.rstrip() == "" 237 | assert "usage:" in output.out.rstrip() 238 | assert "[-h] [--a BYTES] [--b BYTES]" in output.out.rstrip() 239 | 240 | 241 | def test_value_error_on_union(capsys: CaptureFixture): 242 | parser = ArgParser() 243 | runner = CLIRunner(catch_exceptions=False) 244 | 245 | @parser.command() 246 | def value_error_on_union(a: Union[int, str]): 247 | print(a) 248 | 249 | with pytest.raises(ValueError): 250 | runner.invoke(parser, ["--a", "aa"]) 251 | -------------------------------------------------------------------------------- /tests/test_types/test_sequences.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from pytest import CaptureFixture 4 | 5 | from argdantic import ArgParser 6 | from argdantic.testing import CLIRunner 7 | 8 | LOG = logging.getLogger(__name__) 9 | 10 | 11 | def test_list_type_required_error(runner: CLIRunner, capsys: CaptureFixture): 12 | parser = ArgParser() 13 | 14 | @parser.command() 15 | def list_type_required_error(a: list): 16 | print(a) 17 | 18 | runner.invoke(parser, []) 19 | output = capsys.readouterr() 20 | LOG.debug(output) 21 | "error: the following arguments are required: --a" in output.err.rstrip() 22 | assert output.out.rstrip() == "" 23 | 24 | 25 | def test_list_type_wrong_arg(runner: CLIRunner, capsys: CaptureFixture): 26 | parser = ArgParser() 27 | 28 | @parser.command() 29 | def list_type_wrong_arg(a: list): 30 | print(a) 31 | 32 | runner.invoke(parser, ["--a"]) 33 | output = capsys.readouterr() 34 | LOG.debug(output) 35 | assert "argument --a: expected at least one 
argument" in output.err.rstrip() 36 | assert output.out.rstrip() == "" 37 | 38 | 39 | def test_list_type(runner: CLIRunner, capsys: CaptureFixture): 40 | parser = ArgParser() 41 | 42 | @parser.command() 43 | def list_type(a: list): 44 | print(a) 45 | 46 | runner.invoke(parser, ["--a", "1", "2", "3"]) 47 | output = capsys.readouterr() 48 | LOG.debug(output) 49 | assert output.err.rstrip() == "" 50 | assert output.out.rstrip() == "['1', '2', '3']" 51 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import typing as types 3 | 4 | from argdantic.utils import is_container, is_mapping, is_multiple, is_typing, type_name 5 | 6 | LOG = logging.getLogger(__name__) 7 | 8 | 9 | def test_type_names(): 10 | assert type_name(int) == "int" 11 | assert type_name(str) == "str" 12 | assert type_name(float) == "float" 13 | assert type_name(bool) == "bool" 14 | assert type_name(bytes) == "bytes" 15 | assert type_name(list) == "list" 16 | assert type_name(tuple) == "tuple" 17 | assert type_name(dict) == "dict" 18 | assert type_name(set) == "set" 19 | assert type_name(frozenset) == "frozenset" 20 | assert type_name(types.Mapping) == "mapping" 21 | assert type_name(types.Sequence) == "sequence" 22 | assert type_name(types.Iterable) == "iterable" 23 | assert type_name(types.Container) == "container" 24 | assert type_name(types.List) == "list" 25 | assert type_name(types.Tuple) == "tuple" 26 | assert type_name(types.Set) == "set" 27 | assert type_name(types.FrozenSet) == "frozenset" 28 | assert type_name(types.Dict) == "dict" 29 | assert type_name(types.Deque) == "deque" 30 | assert type_name(types.DefaultDict) == "defaultdict" 31 | 32 | 33 | def test_registry_set(): 34 | from argdantic.registry import Registry 35 | 36 | registry = Registry() 37 | registry["foo"] = "bar" 38 | assert registry.store == {"foo": "bar"} 39 | 40 | 41 | def test_registry_get(): 42 | from argdantic.registry import Registry 43 | 44 | registry = Registry() 45 | registry["foo"] = "bar" 46 | assert registry["foo"] == "bar" 47 | 48 | 49 | def test_registry_get_missing(): 50 | from argdantic.registry import Registry 51 | 52 | registry = Registry() 53 | result = registry["foo"] 54 | assert result is None 55 | 56 | 57 | def test_registry_length(): 58 | from argdantic.registry import Registry 59 | 60 | registry = Registry() 61 | assert len(registry) == 0 62 | registry["foo"] = "bar" 63 | assert len(registry) == 1 64 | registry["bar"] = "baz" 65 | assert len(registry) == 2 66 | del registry["foo"] 67 | assert len(registry) == 1 68 | del registry["bar"] 69 | assert len(registry) == 0 70 | 71 | 72 | def test_registry_iterate(): 73 | from argdantic.registry import Registry 74 | 75 | registry = Registry() 76 | registry["foo"] = "bar" 77 | registry["bar"] = "baz" 78 | assert list(registry) == ["foo", "bar"] 79 | 80 | 81 | def test_is_multiple(): 82 | assert is_multiple(list) is True 83 | assert is_multiple(tuple) is True 84 | assert is_multiple(range) is True 85 | assert is_multiple(dict) is False 86 | assert is_multiple(int) is False 87 | assert is_multiple(str) is False 88 | assert is_multiple(bytes) is False 89 | assert is_multiple(float) is False 90 | assert is_multiple(bool) is False 91 | assert is_multiple(None) is False 92 | assert is_multiple(object) is False 93 | assert is_multiple(type) is False 94 | assert is_multiple(types.Generic) is False 95 | assert is_multiple(types.Sequence) is 
True 96 | assert is_multiple(types.Sequence[int]) is True 97 | assert is_multiple(types.List) is True 98 | assert is_multiple(types.Tuple[str]) is True 99 | 100 | 101 | def test_is_container(): 102 | assert is_container(list) is True 103 | assert is_container(tuple) is True 104 | assert is_container(range) is True 105 | assert is_container(dict) is True 106 | assert is_container(int) is False 107 | assert is_container(str) is False 108 | assert is_container(bytes) is False 109 | assert is_container(float) is False 110 | assert is_container(bool) is False 111 | assert is_container(None) is False 112 | assert is_container(object) is False 113 | assert is_container(type) is False 114 | assert is_container(types.Generic) is False 115 | assert is_container(types.Sequence) is True 116 | assert is_container(types.Sequence[int]) is True 117 | assert is_container(types.List) is True 118 | assert is_container(types.Tuple[str]) is True 119 | assert is_container(types.Mapping) is True 120 | 121 | 122 | def test_is_mapping(): 123 | assert is_mapping(list) is False 124 | assert is_mapping(tuple) is False 125 | assert is_mapping(range) is False 126 | assert is_mapping(dict) is True 127 | assert is_mapping(int) is False 128 | assert is_mapping(str) is False 129 | assert is_mapping(bytes) is False 130 | assert is_mapping(float) is False 131 | assert is_mapping(bool) is False 132 | assert is_mapping(None) is False 133 | assert is_mapping(object) is False 134 | assert is_mapping(type) is False 135 | assert is_mapping(types.Generic) is False 136 | assert is_mapping(types.Sequence) is False 137 | assert is_mapping(types.Sequence[int]) is False 138 | assert is_mapping(types.List) is False 139 | assert is_mapping(types.Tuple[str]) is False 140 | assert is_mapping(types.Mapping) is True 141 | assert is_mapping(types.Mapping[str, int]) is True 142 | assert is_mapping(types.Dict) is True 143 | 144 | 145 | def test_is_typing(): 146 | assert is_typing(list) is False 147 | assert is_typing(tuple) is False 148 | assert is_typing(range) is False 149 | assert is_typing(dict) is False 150 | assert is_typing(int) is False 151 | assert is_typing(str) is False 152 | assert is_typing(bytes) is False 153 | assert is_typing(float) is False 154 | assert is_typing(bool) is False 155 | assert is_typing(None) is False 156 | assert is_typing(object) is False 157 | assert is_typing(type) is False 158 | assert is_typing(types.Type) is True 159 | assert is_typing(types.List[types.Set[str]]) is False 160 | --------------------------------------------------------------------------------