├── .config ├── constraints.txt ├── dictionary.txt ├── pydoclint-baseline.txt ├── requirements-docs.in ├── requirements-test.in └── requirements.in ├── .env ├── .github ├── CODEOWNERS ├── CODE_OF_CONDUCT.md ├── dependabot.yml ├── release-drafter.yml └── workflows │ ├── ack.yml │ ├── push.yml │ ├── release.yml │ └── tox.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .prettierrc.yaml ├── .readthedocs.yml ├── .sonarcloud.properties ├── .taplo.toml ├── .vscode ├── extensions.json ├── launch.json ├── settings.json └── tasks.json ├── LICENSE ├── README.md ├── ansible.cfg ├── codecov.yml ├── cspell.config.yaml ├── docs └── index.md ├── mise.toml ├── mkdocs.yml ├── pyproject.toml ├── src └── ansible_dev_environment │ ├── __init__.py │ ├── __main__.py │ ├── arg_parser.py │ ├── cli.py │ ├── collection.py │ ├── config.py │ ├── definitions.py │ ├── output.py │ ├── subcommands │ ├── __init__.py │ ├── checker.py │ ├── inspector.py │ ├── installer.py │ ├── lister.py │ ├── treemaker.py │ └── uninstaller.py │ ├── tree.py │ └── utils.py ├── tests ├── __init__.py ├── conftest.py ├── fixtures │ ├── galaxy.yml │ └── requirements.yml ├── integration │ ├── __init__.py │ ├── test_basic.py │ └── test_user_python.py ├── test_argparser.py └── unit │ ├── __init__.py │ ├── conftest.py │ ├── test_cli.py │ ├── test_cli_deprecated.py │ ├── test_cli_isolation.py │ ├── test_cli_precedence.py │ ├── test_collection.py │ ├── test_config.py │ ├── test_inspector.py │ ├── test_installer.py │ ├── test_lister.py │ ├── test_main.py │ ├── test_output.py │ ├── test_tree.py │ ├── test_treemaker.py │ ├── test_uninstaller.py │ └── test_utils.py ├── tools └── report-coverage └── tox.ini /.config/constraints.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.10 3 | # by the following command: 4 | # 5 | # pip-compile --all-extras --no-annotate --output-file=.config/constraints.txt --strip-extras .config/requirements.in pyproject.toml 6 | # 7 | ansible-builder==3.1.0 8 | argcomplete==3.6.2 9 | astroid==3.3.10 10 | attrs==25.3.0 11 | babel==2.17.0 12 | backrefs==5.8 13 | beautifulsoup4==4.13.4 14 | bindep==2.13.0 15 | build==1.2.2.post1 16 | cachetools==6.0.0 17 | cairocffi==1.7.1 18 | cairosvg==2.8.2 19 | certifi==2025.4.26 20 | cffi==1.17.1 21 | cfgv==3.4.0 22 | chardet==5.2.0 23 | charset-normalizer==3.4.2 24 | click==8.1.8 25 | colorama==0.4.6 26 | coverage==7.8.2 27 | csscompressor==0.9.5 28 | cssselect2==0.8.0 29 | defusedxml==0.7.1 30 | dill==0.4.0 31 | distlib==0.3.9 32 | distro==1.9.0 33 | dnspython==2.7.0 34 | docstring-parser-fork==0.0.12 35 | exceptiongroup==1.3.0 36 | execnet==2.1.1 37 | filelock==3.18.0 38 | ghp-import==2.1.0 39 | griffe==1.7.3 40 | hjson==3.1.0 41 | htmlmin2==0.1.13 42 | identify==2.6.12 43 | idna==3.10 44 | iniconfig==2.1.0 45 | isort==6.0.1 46 | jinja2==3.1.6 47 | jsmin==3.0.1 48 | jsonschema==4.23.0 49 | jsonschema-specifications==2025.4.1 50 | linkchecker==10.5.0 51 | markdown==3.8 52 | markdown-exec==1.10.3 53 | markdown-include==0.8.1 54 | markupsafe==3.0.2 55 | mccabe==0.7.0 56 | mergedeep==1.3.4 57 | mkdocs==1.6.1 58 | mkdocs-ansible==25.2.0 59 | mkdocs-autorefs==1.4.2 60 | mkdocs-gen-files==0.5.0 61 | mkdocs-get-deps==0.2.0 62 | mkdocs-htmlproofer-plugin==1.3.0 63 | mkdocs-macros-plugin==1.3.7 64 | mkdocs-material==9.6.14 65 | mkdocs-material-extensions==1.3.1 66 | mkdocs-minify-plugin==0.8.0 67 | mkdocs-monorepo-plugin==1.1.0 68 | mkdocstrings==0.29.1 69 | mkdocstrings-python==1.16.11 70 | 
mypy==1.15.0 71 | mypy-extensions==1.1.0 72 | nodeenv==1.9.1 73 | packaging==25.0 74 | paginate==0.5.7 75 | parsley==1.3 76 | pathspec==0.12.1 77 | pbr==6.1.1 78 | pillow==11.2.1 79 | pip-tools==7.4.1 80 | platformdirs==4.3.8 81 | pluggy==1.6.0 82 | pre-commit==4.2.0 83 | pycparser==2.22 84 | pydoclint==0.6.6 85 | pygments==2.19.1 86 | pylint==3.3.7 87 | pymdown-extensions==10.15 88 | pyproject-api==1.9.1 89 | pyproject-hooks==1.2.0 90 | pytest==8.3.5 91 | pytest-instafail==0.5.0 92 | pytest-xdist==3.6.1 93 | python-dateutil==2.9.0.post0 94 | python-slugify==8.0.4 95 | pyyaml==6.0.2 96 | pyyaml-env-tag==1.1 97 | referencing==0.36.2 98 | requests==2.32.3 99 | rpds-py==0.25.1 100 | ruff==0.11.11 101 | six==1.17.0 102 | soupsieve==2.7 103 | subprocess-tee==0.4.2 104 | super-collections==0.5.3 105 | termcolor==3.1.0 106 | text-unidecode==1.3 107 | tinycss2==1.4.0 108 | toml-sort==0.24.2 109 | tomli==2.2.1 110 | tomlkit==0.13.2 111 | tox==4.26.0 112 | types-pyyaml==6.0.12.20250516 113 | typing-extensions==4.13.2 114 | urllib3==2.4.0 115 | uv==0.7.8 116 | virtualenv==20.31.2 117 | watchdog==6.0.0 118 | webencodings==0.5.1 119 | wheel==0.45.1 120 | 121 | # The following packages are considered to be unsafe in a requirements file: 122 | # pip 123 | # setuptools 124 | -------------------------------------------------------------------------------- /.config/dictionary.txt: -------------------------------------------------------------------------------- 1 | acfg 2 | argnames 3 | argvalues 4 | bindep 5 | bindir 6 | bthornto 7 | caplog 8 | capsys 9 | cauthor 10 | cdescription 11 | cnamespace 12 | collectonly 13 | cpath 14 | crepository 15 | csource 16 | delenv 17 | excinfo 18 | fileh 19 | fqcn 20 | levelname 21 | netcommon 22 | platinclude 23 | platlib 24 | platstdlib 25 | purelib 26 | pylibssh 27 | reqs 28 | sessionstart 29 | setenv 30 | treemaker 31 | unisolated 32 | usefixtures 33 | xdist 34 | xmltodict 35 | adtv 36 | -------------------------------------------------------------------------------- /.config/pydoclint-baseline.txt: -------------------------------------------------------------------------------- 1 | tests/unit/test_installer.py 2 | DOC502: Function `test_adt_install` has a "Raises" section in the docstring, but there are not "raise" statements in the body 3 | -------------------- 4 | tests/unit/test_treemaker.py 5 | DOC101: Method `SafeEnvBuilder.__init__`: Docstring contains fewer arguments than in function signature. 6 | DOC103: Method `SafeEnvBuilder.__init__`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [clear: bool, prompt: str | None, symlinks: bool, system_site_packages: bool, upgrade: bool, upgrade_deps: bool, with_pip: bool]. 
7 | -------------------- 8 | -------------------------------------------------------------------------------- /.config/requirements-docs.in: -------------------------------------------------------------------------------- 1 | mkdocs-ansible>=24.3.0 2 | -------------------------------------------------------------------------------- /.config/requirements-test.in: -------------------------------------------------------------------------------- 1 | argcomplete 2 | coverage[toml] 3 | mypy 4 | pip-tools 5 | pre-commit 6 | pydoclint 7 | pylint 8 | pytest 9 | pytest-instafail 10 | pytest-xdist 11 | ruff 12 | toml-sort 13 | tox 14 | types-PyYAML 15 | uv 16 | -------------------------------------------------------------------------------- /.config/requirements.in: -------------------------------------------------------------------------------- 1 | ansible-builder 2 | bindep 3 | pyyaml 4 | subprocess-tee 5 | -------------------------------------------------------------------------------- /.env: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # PWD is supposed to be the directory containing the .envrc file based on the 3 | # specs. This trick should isolate our testing from the user environment. 4 | ANSIBLE_HOME=$PWD/.ansible 5 | ANSIBLE_CONFIG=$PWD/ansible.cfg 6 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @ansible/devtools 2 | -------------------------------------------------------------------------------- /.github/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Community Code of Conduct 2 | 3 | Please see the official 4 | [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
5 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: 2 3 | updates: 4 | - package-ecosystem: pip 5 | directory: /.config/ 6 | schedule: 7 | day: sunday 8 | interval: weekly 9 | labels: 10 | - dependabot-deps-updates 11 | - skip-changelog 12 | groups: 13 | dependencies: 14 | patterns: 15 | - "*" 16 | - package-ecosystem: "github-actions" 17 | directory: "/" 18 | schedule: 19 | interval: daily 20 | labels: 21 | - "dependencies" 22 | - "skip-changelog" 23 | -------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # see https://github.com/ansible/team-devtools 3 | _extends: ansible/team-devtools 4 | -------------------------------------------------------------------------------- /.github/workflows/ack.yml: -------------------------------------------------------------------------------- 1 | # See https://github.com/ansible/team-devtools/blob/main/.github/workflows/ack.yml 2 | name: ack 3 | 4 | concurrency: 5 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} 6 | cancel-in-progress: true 7 | 8 | on: 9 | pull_request_target: 10 | types: [opened, labeled, unlabeled, synchronize] 11 | 12 | jobs: 13 | ack: 14 | uses: ansible/team-devtools/.github/workflows/ack.yml@token_revised 15 | secrets: inherit 16 | -------------------------------------------------------------------------------- /.github/workflows/push.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # See https://github.com/ansible/team-devtools/blob/main/.github/workflows/push.yml 3 | name: push 4 | on: 5 | push: 6 | branches: 7 | - main 8 | - "releases/**" 9 | - "stable/**" 10 | workflow_dispatch: 11 | 12 | jobs: 13 | ack: 14 | uses: ansible/team-devtools/.github/workflows/push.yml@main 15 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: release 3 | 4 | on: 5 | release: 6 | types: [published] 7 | 8 | jobs: 9 | release: 10 | environment: release 11 | runs-on: ubuntu-24.04 12 | permissions: 13 | id-token: write 14 | 15 | env: 16 | FORCE_COLOR: 1 17 | PY_COLORS: 1 18 | 19 | steps: 20 | - name: Switch to using Python 3.12 by default 21 | uses: actions/setup-python@v5 22 | with: 23 | python-version: "3.12" 24 | 25 | - name: Install tox 26 | run: python3 -m pip install --user "tox>=4.0.0" 27 | 28 | - name: Check out src from Git 29 | uses: actions/checkout@v4 30 | with: 31 | fetch-depth: 0 # needed by setuptools-scm 32 | 33 | - name: Build dists 34 | run: python3 -m tox -e pkg 35 | 36 | - name: Publish to pypi.org 37 | uses: pypa/gh-action-pypi-publish@release/v1 38 | 39 | forum_post: 40 | needs: release 41 | runs-on: ubuntu-24.04 42 | 43 | steps: 44 | - name: Retrieve the forum post script from team-devtools 45 | run: curl -O https://raw.githubusercontent.com/ansible/team-devtools/main/.github/workflows/forum_post.py 46 | 47 | - name: Run the forum post script 48 | run: python3 forum_post.py ${{ github.event.repository.full_name }} ${{ github.event.release.tag_name }} ${{ secrets.FORUM_KEY }} ${{ secrets.FORUM_USER }} 49 | --------------------------------------------------------------------------------
/.github/workflows/tox.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: tox 3 | 4 | on: 5 | merge_group: 6 | branches: 7 | - "main" 8 | push: 9 | branches: 10 | - "main" 11 | pull_request: 12 | branches: 13 | - "main" 14 | workflow_call: 15 | 16 | concurrency: 17 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} 18 | cancel-in-progress: true 19 | 20 | jobs: 21 | tox: 22 | uses: ansible/team-devtools/.github/workflows/tox.yml@main 23 | with: 24 | max_python: "3.13" 25 | jobs_producing_coverage: 8 26 | other_names: | 27 | docs 28 | lint 29 | pkg 30 | py313-devel 31 | py-mise:tox -e py:mise=true 32 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | junit.xml 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.lcov 51 | *.py,cover 52 | .hypothesis/ 53 | .pytest_cache/ 54 | cover/ 55 | 56 | # Translations 57 | *.mo 58 | *.pot 59 | 60 | # Django stuff: 61 | *.log 62 | local_settings.py 63 | db.sqlite3 64 | db.sqlite3-journal 65 | 66 | # Flask stuff: 67 | instance/ 68 | .webassets-cache 69 | 70 | # Scrapy stuff: 71 | .scrapy 72 | 73 | # Sphinx documentation 74 | docs/_build/ 75 | 76 | # PyBuilder 77 | .pybuilder/ 78 | target/ 79 | 80 | # Jupyter Notebook 81 | .ipynb_checkpoints 82 | 83 | # IPython 84 | profile_default/ 85 | ipython_config.py 86 | 87 | # pyenv 88 | # For a library or package, you might want to ignore these files since the code is 89 | # intended to run in multiple environments; otherwise, check them in: 90 | # .python-version 91 | 92 | # pipenv 93 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 94 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 95 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 96 | # install all needed dependencies. 97 | #Pipfile.lock 98 | 99 | # poetry 100 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 101 | # This is especially recommended for binary packages to ensure reproducibility, and is more 102 | # commonly ignored for libraries. 103 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 104 | #poetry.lock 105 | 106 | # pdm 107 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 108 | #pdm.lock 109 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 110 | # in version control. 
111 | # https://pdm.fming.dev/#use-with-ide 112 | .pdm.toml 113 | 114 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 115 | __pypackages__/ 116 | 117 | # Celery stuff 118 | celerybeat-schedule 119 | celerybeat.pid 120 | 121 | # SageMath parsed files 122 | *.sage.py 123 | 124 | # Environments 125 | .venv 126 | env/ 127 | venv/ 128 | ENV/ 129 | env.bak/ 130 | venv.bak/ 131 | 132 | # Spyder project settings 133 | .spyderproject 134 | .spyproject 135 | 136 | # Rope project settings 137 | .ropeproject 138 | 139 | # mkdocs documentation 140 | /site 141 | 142 | # mypy 143 | .mypy_cache/ 144 | .dmypy.json 145 | dmypy.json 146 | 147 | # Pyre type checker 148 | .pyre/ 149 | 150 | # pytype static type analyzer 151 | .pytype/ 152 | 153 | # Cython debug symbols 154 | cython_debug/ 155 | 156 | # PyCharm 157 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 158 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 159 | # and can be added to the global gitignore or merged into this file. For a more nuclear 160 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 161 | #.idea/ 162 | 163 | 164 | 165 | # In contrast to the entries above this line which largely come from 166 | untracked sources, the following have been individually rationalized 167 | and should all have detailed explanations 168 | 169 | # Version created and populated by setuptools_scm 170 | /src/*/_version.py 171 | 172 | .DS_Store 173 | _readthedocs/ 174 | .ansible 175 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | ci: 3 | # format compatible with commitlint 4 | autoupdate_commit_msg: "chore: pre-commit autoupdate" 5 | autoupdate_schedule: monthly 6 | autofix_commit_msg: "chore: auto fixes from pre-commit.com hooks" 7 | 8 | repos: 9 | - repo: https://github.com/pre-commit/pre-commit-hooks 10 | rev: v5.0.0 11 | hooks: 12 | - id: check-added-large-files 13 | - id: check-merge-conflict 14 | - id: check-symlinks 15 | - id: debug-statements 16 | - id: detect-private-key 17 | - id: end-of-file-fixer 18 | - id: trailing-whitespace 19 | 20 | - repo: https://github.com/asottile/add-trailing-comma.git 21 | rev: v3.1.0 22 | hooks: 23 | - id: add-trailing-comma 24 | 25 | - repo: https://github.com/Lucas-C/pre-commit-hooks.git 26 | rev: v1.5.5 27 | hooks: 28 | - id: remove-tabs 29 | exclude: > 30 | (?x)^( 31 | .config/pydoclint-baseline.txt 32 | )$ 33 | 34 | - repo: https://github.com/pycontribs/mirrors-prettier 35 | rev: v3.5.3 36 | hooks: 37 | - id: prettier 38 | always_run: true 39 | additional_dependencies: 40 | - prettier 41 | - prettier-plugin-toml 42 | - prettier-plugin-sort-json 43 | 44 | - repo: https://github.com/pappasam/toml-sort 45 | rev: v0.24.2 46 | hooks: 47 | - id: toml-sort-fix 48 | alias: toml 49 | 50 | - repo: https://github.com/tox-dev/tox-ini-fmt 51 | rev: 1.5.0 52 | hooks: 53 | - id: tox-ini-fmt 54 | 55 | - repo: https://github.com/astral-sh/ruff-pre-commit 56 | rev: v0.11.8 57 | hooks: 58 | - id: ruff 59 | entry: sh -c 'ruff check --fix --force-exclude && ruff format --force-exclude' 60 | types_or: [python, pyi] 61 | 62 | - repo: https://github.com/streetsidesoftware/cspell-cli 63 | rev: v8.19.1 64 | hooks: 65 | - id: cspell 66 | name: Spell check with cspell 67 | 68 | - repo: https://github.com/jsh9/pydoclint 69 |
rev: "0.6.6" 70 | hooks: 71 | - id: pydoclint 72 | # This allows automatic reduction of the baseline file when needed. 73 | entry: sh -ec "pydoclint . && pydoclint --generate-baseline=1 ." 74 | pass_filenames: false 75 | 76 | - repo: https://github.com/pycqa/pylint.git 77 | rev: v3.3.7 78 | hooks: 79 | - id: pylint 80 | args: 81 | - --output-format=colorized 82 | additional_dependencies: 83 | - argcomplete 84 | - pytest 85 | - pyyaml 86 | - subprocess_tee 87 | - setuptools 88 | - tox 89 | 90 | - repo: https://github.com/pre-commit/mirrors-mypy.git 91 | rev: v1.15.0 92 | hooks: 93 | - id: mypy 94 | additional_dependencies: 95 | - argcomplete 96 | - pip 97 | - pytest 98 | - subprocess_tee 99 | - types-pyyaml 100 | - types-setuptools 101 | - uv 102 | # Override default pre-commit '--ignore-missing-imports' 103 | args: [--strict] 104 | 105 | - repo: https://github.com/jazzband/pip-tools 106 | rev: 7.4.1 107 | hooks: 108 | - id: pip-compile 109 | name: deps 110 | alias: deps 111 | stages: [manual] 112 | entry: >- 113 | pip-compile .config/requirements.in 114 | --upgrade 115 | --all-extras 116 | --no-annotate 117 | --strip-extras 118 | --output-file=.config/constraints.txt pyproject.toml 119 | files: ^.config\/.*requirements.*$ 120 | language_version: "3.10" # minimal we support officially 121 | -------------------------------------------------------------------------------- /.prettierrc.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | proseWrap: always 3 | jsonRecursiveSort: true # prettier-plugin-sort-json 4 | tabWidth: 2 5 | useTabs: false 6 | overrides: 7 | - files: 8 | - "*.md" 9 | options: 10 | # compatibility with markdownlint 11 | proseWrap: always 12 | printWidth: 80 13 | - files: 14 | - "*.yaml" 15 | - "*.yml" 16 | options: 17 | # compatibility with yamllint 18 | proseWrap: preserve 19 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: 2 3 | 4 | mkdocs: 5 | fail_on_warning: true 6 | configuration: mkdocs.yml 7 | 8 | build: 9 | os: ubuntu-24.04 10 | tools: 11 | python: "3.13" 12 | commands: 13 | - python3 -m pip install --user tox 14 | - python3 -m tox -e docs 15 | submodules: 16 | include: all 17 | recursive: true 18 | -------------------------------------------------------------------------------- /.sonarcloud.properties: -------------------------------------------------------------------------------- 1 | sonar.python.version=3.10, 3.11, 3.12, 3.13 2 | sonar.sources=src/ 3 | sonar.tests=tests/ 4 | -------------------------------------------------------------------------------- /.taplo.toml: -------------------------------------------------------------------------------- 1 | [formatting] 2 | # cspell: disable-next-line 3 | # compatibility between toml-sort-fix pre-commit hook and panekj.even-betterer-toml extension 4 | align_comments = false 5 | array_trailing_comma = false 6 | compact_arrays = true 7 | compact_entries = false 8 | compact_inline_tables = true 9 | inline_table_expand = false 10 | reorder_keys = true 11 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "charliermarsh.ruff", 4 | "esbenp.prettier-vscode", 5 | "gruntfuggly.triggertaskonsave", 6 | "markis.code-coverage", 7 | "ms-python.debugpy", 8 | 
"ms-python.mypy-type-checker", 9 | "ms-python.pylint", 10 | "ms-python.python", 11 | "sonarsource.sonarlint-vscode", 12 | "streetsidesoftware.code-spell-checker" 13 | ], 14 | "unwantedRecommendations": ["ms-python.black-formatter"] 15 | } 16 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "Python: Module", 9 | "type": "debugpy", 10 | "request": "launch", 11 | "module": "ansible_dev_environment", 12 | "args": ["inspect", "-v", "-v", "-vv"], 13 | "justMyCode": true 14 | }, 15 | { 16 | "name": "Python: Current File", 17 | "type": "debugpy", 18 | "request": "launch", 19 | "program": "${file}", 20 | "console": "integratedTerminal" 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "[jsonc]": { 3 | "editor.defaultFormatter": "esbenp.prettier-vscode" 4 | }, 5 | "[python]": { 6 | "editor.codeActionsOnSave": { 7 | "source.fixAll": "explicit", 8 | "source.organizeImports": "explicit" 9 | }, 10 | "editor.defaultFormatter": "charliermarsh.ruff", 11 | "editor.formatOnSave": true 12 | }, 13 | "flake8.importStrategy": "fromEnvironment", 14 | "markiscodecoverage.searchCriteria": ".cache/.coverage/lcov.info", 15 | "mypy-type-checker.args": ["--config-file=${workspaceFolder}/pyproject.toml"], 16 | "mypy-type-checker.importStrategy": "fromEnvironment", 17 | "mypy-type-checker.reportingScope": "workspace", 18 | "pylint.importStrategy": "fromEnvironment", 19 | "python.testing.pytestArgs": ["tests"], 20 | "python.testing.pytestEnabled": true, 21 | "python.testing.unittestEnabled": false, 22 | "triggerTaskOnSave.tasks": { 23 | "pydoclint": ["*.py"] 24 | }, 25 | "sonarlint.connectedMode.project": { 26 | "connectionId": "ansible", 27 | "projectKey": "ansible_ansible-dev-environment" 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | // See https://go.microsoft.com/fwlink/?LinkId=733558 3 | // for the documentation about the tasks.json format 4 | "version": "2.0.0", 5 | "tasks": [ 6 | { 7 | "label": "pydoclint", 8 | "type": "shell", 9 | "command": "pydoclint", 10 | "args": ["."], 11 | "presentation": { 12 | "reveal": "never" 13 | }, 14 | "problemMatcher": { 15 | "owner": "pydoclint", 16 | "fileLocation": ["relative", "${workspaceFolder}"], 17 | "pattern": { 18 | "regexp": "^(.*?):(\\d+):\\s(.*?):\\s(.*)$", 19 | "file": 1, 20 | "line": 2, 21 | "code": 3, 22 | "message": 4 23 | } 24 | }, 25 | "group": { 26 | "kind": "none", 27 | "isDefault": true 28 | } 29 | } 30 | ] 31 | } 32 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ansible-dev-environment 2 | 3 | A pip-like install for ansible collections. 
4 | 5 | ## Features 6 | 7 | - Promotes an "ephemeral" development approach 8 | - Ensures the current development environment is isolated 9 | - Install all collection python requirements 10 | - Install all collection test requirements 11 | - Checks for missing system packages 12 | - Symlinks the current collection into the current python interpreter's 13 | site-packages 14 | - Install all collection collection dependencies into the current python 15 | interpreter's site-packages 16 | 17 | By placing collections into the python site-packages directory they are 18 | discoverable by ansible as well as python and pytest. 19 | 20 | ## Communication 21 | 22 | - Join the Ansible forum: 23 | 24 | - [Get Help](https://forum.ansible.com/c/help/6): get help or help others. 25 | Please add appropriate tags if you start new discussions. 26 | - [Social Spaces](https://forum.ansible.com/c/chat/4): gather and interact 27 | with fellow enthusiasts. 28 | - [News & Announcements](https://forum.ansible.com/c/news/5): track 29 | project-wide announcements including social events. 30 | - [Bullhorn newsletter](https://docs.ansible.com/ansible/devel/community/communication.html#the-bullhorn): 31 | used to announce releases and important changes. 32 | 33 | For more information about communication, see the 34 | [Ansible communication guide](https://docs.ansible.com/ansible/devel/community/communication.html). 35 | 36 | ## Usage 37 | 38 | ### Setting up a development environment 39 | 40 | ``` 41 | $ pip install ansible-dev-environment --user 42 | $ git clone 43 | $ cd 44 | $ ade install -e .\[test] --venv .venv 45 | INFO: Found collection name: network.interfaces from /home/bthornto/github/network.interfaces/galaxy.yml. 46 | INFO: Creating virtual environment: /home/bthornto/github/network.interfaces/venv 47 | INFO: Virtual environment: /home/bthornto/github/network.interfaces/venv 48 | INFO: Using specified interpreter: /home/bthornto/github/network.interfaces/venv/bin/python 49 | INFO: Requirements file /home/bthornto/github/network.interfaces/requirements.txt is empty, skipping 50 | INFO: Installing python requirements from /home/bthornto/github/network.interfaces/test-requirements.txt 51 | INFO: Installing ansible-core. 52 | INFO: Initializing build directory: /home/bthornto/github/network.interfaces/build 53 | INFO: Copying collection to build directory using git ls-files. 54 | INFO: Running ansible-galaxy to build collection. 55 | INFO: Running ansible-galaxy to install collection and it's dependencies. 56 | INFO: Removing installed /home/bthornto/github/network.interfaces/venv/lib64/python3.11/site-packages/ansible_collections/network/interfaces 57 | INFO: Symlinking /home/bthornto/github/network.interfaces/venv/lib64/python3.11/site-packages/ansible_collections/network/interfaces to /home/bthornto/github/network.interfaces 58 | WARNING: A virtual environment was specified but has not been activated. 59 | WARNING: Please activate the virtual environment: 60 | source venv/bin/activate 61 | ``` 62 | 63 | ### Tearing down the development environment 64 | 65 | ``` 66 | $ ade uninstall ansible.scm 67 | INFO Found collection name: ansible.scm from /home/bthornto/github/ansible.scm/galaxy.yml. 
68 | INFO Requirements file /home/bthornto/github/ansible.scm/requirements.txt is empty, skipping 69 | INFO Uninstalling python requirements from /home/bthornto/github/ansible.scm/test-requirements.txt 70 | INFO Removed ansible.utils: /home/bthornto/github/ansible.scm/venv/lib64/python3.11/site-packages/ansible_collections/ansible/utils 71 | INFO Removed ansible.utils*.info: /home/bthornto/github/ansible.scm/venv/lib64/python3.11/site-packages/ansible_collections/ansible.utils-2.10.3.info 72 | INFO Removed ansible.scm: /home/bthornto/github/ansible.scm/venv/lib64/python3.11/site-packages/ansible_collections/ansible/scm 73 | INFO Removed collection namespace root: /home/bthornto/github/ansible.scm/venv/lib64/python3.11/site-packages/ansible_collections/ansible 74 | INFO Removed collection root: /home/bthornto/github/ansible.scm/venv/lib64/python3.11/site-packages/ansible_collections 75 | ``` 76 | 77 | ## Help 78 | 79 | `ade --help` 80 | 81 | `ade install --help` 82 | 83 | `ade uninstall --help` 84 | -------------------------------------------------------------------------------- /ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | # isolate project testing from user local setup 3 | collections_path = . 4 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | comment: false 2 | coverage: 3 | status: 4 | patch: true 5 | -------------------------------------------------------------------------------- /cspell.config.yaml: -------------------------------------------------------------------------------- 1 | dictionaryDefinitions: 2 | - name: words 3 | path: .config/dictionary.txt 4 | addWords: true 5 | dictionaries: 6 | - bash 7 | - networking-terms 8 | - python 9 | - words 10 | - "!aws" 11 | - "!backwards-compatibility" 12 | - "!cryptocurrencies" 13 | - "!cpp" 14 | ignorePaths: 15 | - .config/requirements* 16 | - \.* 17 | - cspell.config.yaml 18 | - mkdocs.yml 19 | - pyproject.toml 20 | - tox.ini 21 | 22 | languageSettings: 23 | - languageId: python 24 | allowCompoundWords: false 25 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # Ansible Development Environment 2 | 3 | A pip-like install for Ansible collections. 4 | 5 | ## Communication 6 | 7 | - Join the Ansible forum: 8 | 9 | - [Get Help](https://forum.ansible.com/c/help/6): get help or help others. 10 | Please add appropriate tags if you start new discussions. 11 | - [Social Spaces](https://forum.ansible.com/c/chat/4): gather and interact 12 | with fellow enthusiasts. 13 | - [News & Announcements](https://forum.ansible.com/c/news/5): track 14 | project-wide announcements including social events. 15 | - [Bullhorn newsletter](https://docs.ansible.com/ansible/devel/community/communication.html#the-bullhorn): 16 | used to announce releases and important changes. 17 | 18 | For more information about communication, see the 19 | [Ansible communication guide](https://docs.ansible.com/ansible/devel/community/communication.html). 
20 | 21 | ## Features 22 | 23 | - Promotes an "ephemeral" development approach 24 | - Ensures the current development environment is isolated 25 | - Install all collection python requirements 26 | - Install all collection test requirements 27 | - Checks for missing system packages 28 | - Symlinks the current collection into the current python interpreter's 29 | site-packages 30 | - Install all collection collection dependencies into the current python 31 | interpreter's site-packages 32 | - Uses `uv env` instead of python's venv when available to boost performance. 33 | Can be disabled with `SKIP_UV=1` 34 | 35 | By placing collections into the python site-packages directory they are 36 | discoverable by ansible as well as python and pytest. 37 | 38 | ## Usage 39 | 40 | ### Setting up a development environment 41 | 42 | {{ install_from_adt("ansible-dev-environment") }} 43 | 44 | ```bash 45 | pip3 install ansible-dev-environment --user 46 | ``` 47 | 48 | ``` 49 | $ git clone 50 | $ cd 51 | $ ade install -e .\[test] --venv .venv 52 | INFO: Found collection name: network.interfaces from /home/bthornto/github/network.interfaces/galaxy.yml. 53 | INFO: Creating virtual environment: /home/bthornto/github/network.interfaces/venv 54 | INFO: Virtual environment: /home/bthornto/github/network.interfaces/venv 55 | INFO: Using specified interpreter: /home/bthornto/github/network.interfaces/venv/bin/python 56 | INFO: Requirements file /home/bthornto/github/network.interfaces/requirements.txt is empty, skipping 57 | INFO: Installing python requirements from /home/bthornto/github/network.interfaces/test-requirements.txt 58 | INFO: Installing ansible-core. 59 | INFO: Initializing build directory: /home/bthornto/github/network.interfaces/build 60 | INFO: Copying collection to build directory using git ls-files. 61 | INFO: Running ansible-galaxy to build collection. 62 | INFO: Running ansible-galaxy to install collection and it's dependencies. 63 | INFO: Removing installed /home/bthornto/github/network.interfaces/venv/lib64/python3.11/site-packages/ansible_collections/network/interfaces 64 | INFO: Symlinking /home/bthornto/github/network.interfaces/venv/lib64/python3.11/site-packages/ansible_collections/network/interfaces to /home/bthornto/github/network.interfaces 65 | WARNING: A virtual environment was specified but has not been activated. 66 | WARNING: Please activate the virtual environment: 67 | source venv/bin/activate 68 | ``` 69 | 70 | ### Tearing down the development environment 71 | 72 | ``` 73 | $ ade uninstall ansible.scm 74 | INFO Found collection name: ansible.scm from /home/bthornto/github/ansible.scm/galaxy.yml. 
75 | INFO Requirements file /home/bthornto/github/ansible.scm/requirements.txt is empty, skipping 76 | INFO Uninstalling python requirements from /home/bthornto/github/ansible.scm/test-requirements.txt 77 | INFO Removed ansible.utils: /home/bthornto/github/ansible.scm/venv/lib64/python3.11/site-packages/ansible_collections/ansible/utils 78 | INFO Removed ansible.utils*.info: /home/bthornto/github/ansible.scm/venv/lib64/python3.11/site-packages/ansible_collections/ansible.utils-2.10.3.info 79 | INFO Removed ansible.scm: /home/bthornto/github/ansible.scm/venv/lib64/python3.11/site-packages/ansible_collections/ansible/scm 80 | INFO Removed collection namespace root: /home/bthornto/github/ansible.scm/venv/lib64/python3.11/site-packages/ansible_collections/ansible 81 | INFO Removed collection root: /home/bthornto/github/ansible.scm/venv/lib64/python3.11/site-packages/ansible_collections 82 | ``` 83 | 84 | ## Help 85 | 86 | `ade --help` 87 | 88 | `ade install --help` 89 | 90 | `ade uninstall --help` 91 | -------------------------------------------------------------------------------- /mise.toml: -------------------------------------------------------------------------------- 1 | [tools] 2 | python = "3.13" 3 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | --- 2 | site_name: Ansible Development Environment Documentation 3 | site_url: https://ansible.readthedocs.io/projects/dev-environment/ 4 | repo_url: https://github.com/ansible/ansible-dev-environment 5 | edit_uri: blob/main/docs/ 6 | copyright: Copyright © 2023 Red Hat, Inc. 7 | docs_dir: docs 8 | strict: true 9 | 10 | theme: 11 | name: ansible 12 | features: 13 | - content.code.copy 14 | - content.action.edit 15 | - navigation.expand 16 | - navigation.sections 17 | - navigation.instant 18 | - navigation.indexes 19 | - navigation.tracking 20 | - toc.integrate 21 | 22 | extra: 23 | social: 24 | - icon: fontawesome/brands/python 25 | link: https://pypi.org/project/ansible-dev-environment/ 26 | name: PyPI 27 | - icon: fontawesome/solid/scroll 28 | link: https://github.com/ansible/ansible-dev-environment/releases 29 | name: Releases 30 | - icon: simple/mastodon 31 | link: https://fosstodon.org/@ansible 32 | name: Mastodon 33 | - icon: simple/matrix 34 | link: https://matrix.to/#/#devtools:ansible.com 35 | name: Matrix 36 | - icon: fontawesome/brands/discourse 37 | link: https://forum.ansible.com/c/project/7 38 | name: Ansible forum 39 | - icon: fontawesome/brands/github-alt 40 | link: https://github.com/ansible/ansible-dev-environment 41 | name: GitHub 42 | 43 | nav: 44 | - Home: index.md 45 | 46 | plugins: 47 | - autorefs 48 | - macros: 49 | modules: [mkdocs-ansible:mkdocs_ansible] 50 | - markdown-exec 51 | - material/search: 52 | separator: '[\s\-,:!=\[\]()"`/]+|\.(?!\d)|&[lg]t;|(?!\b)(?=[A-Z][a-z])' 53 | # https://github.com/squidfunk/mkdocs-material/issues/6983 54 | # - material/social 55 | - material/tags 56 | - mkdocstrings: 57 | handlers: 58 | python: 59 | paths: [src] 60 | options: 61 | # Sphinx is for historical reasons, but we could consider switching if needed 62 | # https://mkdocstrings.github.io/griffe/docstrings/ 63 | docstring_style: sphinx 64 | merge_init_into_class: yes 65 | show_submodules: yes 66 | import: 67 | - url: https://docs.ansible.com/ansible/latest/objects.inv 68 | domains: [py, std] 69 | 70 | markdown_extensions: 71 | - markdown_include.include: 72 | base_path: docs 73 | - admonition 74 | 
- def_list 75 | - footnotes 76 | - pymdownx.highlight: 77 | anchor_linenums: true 78 | - pymdownx.inlinehilite 79 | - pymdownx.superfences 80 | - pymdownx.magiclink: 81 | repo_url_shortener: true 82 | repo_url_shorthand: true 83 | social_url_shorthand: true 84 | social_url_shortener: true 85 | user: facelessuser 86 | repo: pymdown-extensions 87 | normalize_issue_symbols: true 88 | - pymdownx.tabbed: 89 | alternate_style: true 90 | - toc: 91 | toc_depth: 2 92 | permalink: true 93 | - pymdownx.superfences: 94 | custom_fences: 95 | - name: mermaid 96 | class: mermaid 97 | format: !!python/name:pymdownx.superfences.fence_code_format 98 | - name: python 99 | class: python 100 | validator: !!python/name:markdown_exec.validator 101 | format: !!python/name:markdown_exec.formatter 102 | -------------------------------------------------------------------------------- /src/ansible_dev_environment/__init__.py: -------------------------------------------------------------------------------- 1 | """Root init.""" 2 | -------------------------------------------------------------------------------- /src/ansible_dev_environment/__main__.py: -------------------------------------------------------------------------------- 1 | """A runpy entry point for ansible-dev-environment. 2 | 3 | This makes it possible to invoke CLI 4 | via :command:`python -m ansible_dev_environment`. 5 | """ 6 | 7 | from __future__ import annotations 8 | 9 | from ansible_dev_environment.cli import main 10 | 11 | 12 | if __name__ == "__main__": 13 | main() 14 | -------------------------------------------------------------------------------- /src/ansible_dev_environment/cli.py: -------------------------------------------------------------------------------- 1 | # PYTHON_ARGCOMPLETE_OK 2 | """CLI entrypoint.""" 3 | 4 | from __future__ import annotations 5 | 6 | import os 7 | import sys 8 | import warnings 9 | 10 | from pathlib import Path 11 | from typing import TYPE_CHECKING 12 | 13 | from ansible_dev_environment import subcommands 14 | 15 | from .arg_parser import parse 16 | from .config import Config 17 | from .definitions import COLLECTIONS_PATH as CP 18 | from .definitions import AnsibleCfg 19 | from .output import Output 20 | from .utils import TermFeatures 21 | 22 | 23 | if TYPE_CHECKING: 24 | from argparse import Namespace 25 | 26 | 27 | class Cli: 28 | """The Cli class.""" 29 | 30 | def __init__(self) -> None: 31 | """Initialize the CLI and parse CLI args.""" 32 | self.args: Namespace 33 | self.config: Config 34 | self.output: Output 35 | self.term_features: TermFeatures 36 | self.acfg_cwd = AnsibleCfg(path=Path("./ansible.cfg")) 37 | self.acfg_home = AnsibleCfg(path=Path("~/.ansible.cfg").expanduser().resolve()) 38 | self.acfg_system = AnsibleCfg(path=Path("/etc/ansible/ansible.cfg")) 39 | self.acfg_trusted: Path | None 40 | 41 | def parse_args(self) -> None: 42 | """Parse the command line arguments.""" 43 | self.args = parse() 44 | if hasattr(self.args, "requirement") and self.args.requirement: 45 | self.args.requirement = Path(self.args.requirement).expanduser().resolve() 46 | if self.args.cpi: 47 | self.args.requirement = Path(".config/source-requirements.yml").expanduser().resolve() 48 | 49 | def init_output(self) -> None: 50 | """Initialize the output object.""" 51 | if not sys.stdout.isatty(): 52 | self.term_features = TermFeatures(color=False, links=False) 53 | else: 54 | self.term_features = TermFeatures( 55 | color=self.args.ansi, 56 | links=self.args.ansi, 57 | ) 58 | 59 | self.output = Output( 60 | 
log_append=self.args.log_append, 61 | log_file=self.args.log_file, 62 | log_level=self.args.log_level, 63 | term_features=self.term_features, 64 | verbosity=self.args.verbose, 65 | ) 66 | 67 | def args_sanity(self) -> None: 68 | """Perform some sanity checking on the args.""" 69 | # Ensure PATH is not broken (~ should not be present as many tools do not expand it) 70 | if "~" in os.environ.get("PATH", ""): 71 | err = "~ character was found inside PATH, correct your environment configuration to avoid it. See https://stackoverflow.com/a/44704799/99834" 72 | self.output.critical(err) 73 | # Missing args 74 | if ( 75 | hasattr(self.args, "requirement") 76 | and self.args.requirement 77 | and not self.args.requirement.exists() 78 | ): 79 | err = f"Requirements file not found: {self.args.requirement}" 80 | self.output.critical(err) 81 | 82 | # Multiple editable collections 83 | if ( 84 | hasattr(self.args, "collection_specifier") 85 | and len(self.args.collection_specifier) > 1 86 | and hasattr(self.args, "editable") 87 | and self.args.editable 88 | ): 89 | err = "Editable can only be used with a single collection specifier." 90 | self.output.critical(err) 91 | 92 | # Editable with requirements file 93 | if ( 94 | hasattr(self.args, "requirement") 95 | and self.args.requirement 96 | and hasattr(self.args, "editable") 97 | and self.args.editable 98 | ): 99 | err = "Editable can not be used with a requirements file." 100 | self.output.critical(err) 101 | 102 | self.output.debug("Arguments sanity check passed.") 103 | for arg in vars(self.args): 104 | self.output.debug(f"{arg}: {getattr(self.args, arg)}") 105 | 106 | def isolation_check(self) -> bool: 107 | """Check the environment for isolation. 108 | 109 | Returns: 110 | True if ade can continue, false otherwise. 111 | """ 112 | if not hasattr(self.args, "isolation_mode"): 113 | return True 114 | if self.args.isolation_mode == "restrictive": 115 | return self.isolation_restrictive() 116 | if self.args.isolation_mode == "cfg": 117 | return self.isolation_cfg() 118 | if self.args.isolation_mode == "none": 119 | return self.isolation_none() 120 | self.acfg_trusted = None 121 | return False 122 | 123 | def isolation_cfg(self) -> bool: 124 | """Ensure the environment is isolated using cfg isolation. 125 | 126 | Returns: 127 | True if ade can continue, false otherwise. 128 | """ 129 | if os.environ.get("ANSIBLE_CONFIG"): 130 | err = "ANSIBLE_CONFIG is set" 131 | self.output.error(err) 132 | hint = "Run `unset ANSIBLE_CONFIG` to unset it using cfg isolation mode." 133 | self.output.hint(hint) 134 | self.acfg_trusted = None 135 | return False 136 | 137 | if self.acfg_cwd.exists: 138 | if self.acfg_cwd.collections_path_is_dot: 139 | msg = f"{self.acfg_cwd.path} has '{CP}' which isolates this workspace." 140 | self.output.info(msg) 141 | else: 142 | self.acfg_cwd.set_or_update_collections_path() 143 | msg = f"{self.acfg_cwd.path} updated with '{CP}' to isolate this workspace." 144 | self.output.warning(msg) 145 | self.acfg_trusted = self.acfg_cwd.path 146 | return True 147 | 148 | if self.acfg_home.exists: 149 | if self.acfg_home.collections_path_is_dot: 150 | msg = f"{self.acfg_home.path} has '{CP}' which isolates this and all workspaces." 151 | self.output.info(msg) 152 | else: 153 | self.acfg_home.set_or_update_collections_path() 154 | msg = ( 155 | f"{self.acfg_home.path} updated with '{CP}' to isolate this and all workspaces."
156 | ) 157 | self.output.warning(msg) 158 | self.acfg_trusted = self.acfg_home.path 159 | return True 160 | 161 | if self.acfg_system.exists and self.acfg_system.collections_path_is_dot: 162 | msg = f"{self.acfg_system.path} has '{CP}' which isolates this and all workspaces." 163 | self.output.info(msg) 164 | self.acfg_trusted = self.acfg_system.path 165 | return True 166 | 167 | self.acfg_cwd.author_new() 168 | msg = f"{self.acfg_cwd.path} created with '{CP}' to isolate this workspace." 169 | self.output.info(msg) 170 | self.acfg_trusted = self.acfg_cwd.path 171 | return True 172 | 173 | def isolation_none(self) -> bool: 174 | """No isolation. 175 | 176 | Returns: 177 | True if ade can continue, false otherwise. 178 | """ 179 | self.output.warning( 180 | "An unisolated development environment can cause issues with conflicting dependency" 181 | " versions and the use of incompatible collections.", 182 | ) 183 | self.acfg_trusted = None 184 | return True 185 | 186 | def isolation_restrictive(self) -> bool: 187 | """Ensure the environment is isolated. 188 | 189 | Returns: 190 | True if ade can continue, false otherwise. 191 | """ 192 | env_vars = os.environ 193 | errored = False 194 | if "ANSIBLE_COLLECTIONS_PATHS" in env_vars: 195 | err = "ANSIBLE_COLLECTIONS_PATHS is set" 196 | self.output.error(err) 197 | hint = "Run `unset ANSIBLE_COLLECTIONS_PATHS` to unset it." 198 | self.output.hint(hint) 199 | errored = True 200 | if "ANSIBLE_COLLECTION_PATH" in env_vars: 201 | err = "ANSIBLE_COLLECTION_PATH is set" 202 | self.output.error(err) 203 | hint = "Run `unset ANSIBLE_COLLECTION_PATH` to unset it." 204 | self.output.hint(hint) 205 | errored = True 206 | 207 | home_coll = ( 208 | Path(os.environ.get("ANSIBLE_HOME", "~/.ansible")).expanduser() 209 | / "collections/ansible_collections" 210 | ) 211 | if home_coll.exists() and tuple(home_coll.iterdir()): 212 | err = f"Collections found in {home_coll}" 213 | self.output.error(err) 214 | hint = f"Run `rm -rf {home_coll}` to remove them or configure ANSIBLE_HOME to point to a different location." 215 | self.output.hint(hint) 216 | errored = True 217 | 218 | usr_coll = Path("/usr/share/ansible/collections") 219 | if usr_coll.exists() and tuple(usr_coll.iterdir()): 220 | err = f"Collections found in {usr_coll}" 221 | self.output.error(err) 222 | hint = "Run `sudo rm -rf /usr/share/ansible/collections` to remove them." 223 | self.output.hint(hint) 224 | errored = True 225 | if errored: 226 | err = "The development environment is not isolated, please resolve the above errors." 227 | self.output.warning(err) 228 | return False 229 | return True 230 | 231 | def run(self) -> None: 232 | """Run the application.""" 233 | self.config = Config( 234 | args=self.args, 235 | output=self.output, 236 | term_features=self.term_features, 237 | ) 238 | self.config.init() 239 | 240 | subcommand_cls = getattr(subcommands, self.config.args.subcommand.capitalize()) 241 | subcommand = subcommand_cls(config=self.config, output=self.output) 242 | subcommand.run() 243 | self.exit() 244 | 245 | def exit(self) -> None: 246 | """Exit the application setting the return code.""" 247 | if self.output.call_count["error"]: 248 | sys.exit(1) 249 | if self.output.call_count["warning"]: 250 | sys.exit(2) 251 | sys.exit(0) 252 | 253 | 254 | def main(*, dry: bool = False) -> None: 255 | """Entry point for the ansible-dev-environment CLI. 256 | 257 | Args: 258 | dry: Skip main execution, used internally for testing.
259 | """ 260 | with warnings.catch_warnings(record=True) as warns: 261 | warnings.simplefilter(action="default") 262 | cli = Cli() 263 | cli.parse_args() 264 | cli.init_output() 265 | for warn in warns: 266 | cli.output.warning(str(warn.message)) 267 | warnings.resetwarnings() 268 | cli.args_sanity() 269 | if not cli.isolation_check(): 270 | cli.exit() 271 | if not dry: 272 | cli.run() 273 | -------------------------------------------------------------------------------- /src/ansible_dev_environment/collection.py: -------------------------------------------------------------------------------- 1 | """A collection abstraction.""" 2 | 3 | from __future__ import annotations 4 | 5 | import re 6 | 7 | from dataclasses import dataclass 8 | from pathlib import Path 9 | from typing import TYPE_CHECKING 10 | 11 | import yaml 12 | 13 | 14 | if TYPE_CHECKING: 15 | from .config import Config 16 | from .output import Output 17 | 18 | 19 | @dataclass 20 | class Collection: # pylint: disable=too-many-instance-attributes 21 | """A collection request specification. 22 | 23 | Attributes: 24 | config: The configuration object 25 | path: The collection path 26 | opt_deps: The optional dependencies 27 | local: A boolean indicating if the collection is local 28 | cnamespace: The collection namespace 29 | cname: The collection name 30 | csource: The collection source 31 | specifier: The collection specifier 32 | original: The original collection request 33 | """ 34 | 35 | config: Config 36 | path: Path 37 | opt_deps: str 38 | local: bool 39 | cnamespace: str 40 | cname: str 41 | csource: list[str] 42 | specifier: str 43 | original: str 44 | 45 | @property 46 | def name(self) -> str: 47 | """Return the collection name.""" 48 | return f"{self.cnamespace}.{self.cname}" 49 | 50 | @property 51 | def cache_dir(self) -> Path: 52 | """Return the collection cache directory.""" 53 | collection_cache_dir = self.config.venv_cache_dir / self.name 54 | if not collection_cache_dir.exists(): 55 | collection_cache_dir.mkdir() 56 | return collection_cache_dir 57 | 58 | @property 59 | def build_dir(self) -> Path: 60 | """Return the collection cache directory.""" 61 | collection_build_dir = self.cache_dir / "build" 62 | if not collection_build_dir.exists(): 63 | collection_build_dir.mkdir() 64 | return collection_build_dir 65 | 66 | @property 67 | def site_pkg_path(self) -> Path: 68 | """Return the site packages collection path. 69 | 70 | Returns: 71 | The site packages collection path 72 | """ 73 | return self.config.site_pkg_collections_path / self.cnamespace / self.cname 74 | 75 | 76 | def parse_collection_request( # noqa: PLR0915 77 | string: str, 78 | config: Config, 79 | output: Output, 80 | ) -> Collection: 81 | """Parse a collection request str. 82 | 83 | Args: 84 | string: The collection request string 85 | config: The configuration object 86 | output: The output object 87 | 88 | Raises: 89 | SystemExit: If the collection request is invalid 90 | Returns: 91 | A collection object 92 | """ 93 | # spec with dep, local 94 | if "[" in string and "]" in string: 95 | msg = f"Found optional dependencies in collection request: {string}" 96 | output.debug(msg) 97 | path = Path(string.split("[")[0]).expanduser().resolve() 98 | if not path.exists(): 99 | msg = "Provide an existing path to a collection when specifying optional dependencies." 
100 | output.hint(msg) 101 | msg = f"Failed to find collection path: {path}" 102 | output.critical(msg) 103 | msg = f"Found local collection request with dependencies: {string}" 104 | output.debug(msg) 105 | msg = f"Setting collection path: {path}" 106 | output.debug(msg) 107 | opt_deps = string.split("[")[1].split("]")[0] 108 | msg = f"Setting optional dependencies: {opt_deps}" 109 | output.debug(msg) 110 | local = True 111 | msg = "Setting request as local" 112 | output.debug(msg) 113 | collection = Collection( 114 | config=config, 115 | path=path, 116 | opt_deps=opt_deps, 117 | local=local, 118 | cnamespace="", 119 | cname="", 120 | csource=[], 121 | specifier="", 122 | original=string, 123 | ) 124 | get_galaxy(collection=collection, output=output) 125 | return collection 126 | # spec without dep, local 127 | path = Path(string).expanduser().resolve() 128 | if path.exists(): 129 | msg = f"Found local collection request without dependencies: {string}" 130 | output.debug(msg) 131 | msg = f"Setting collection path: {path}" 132 | output.debug(msg) 133 | msg = "Setting request as local" 134 | output.debug(msg) 135 | local = True 136 | collection = Collection( 137 | config=config, 138 | path=path, 139 | opt_deps="", 140 | local=local, 141 | cnamespace="", 142 | cname="", 143 | csource=[], 144 | specifier="", 145 | original=string, 146 | ) 147 | get_galaxy(collection=collection, output=output) 148 | return collection 149 | non_local_re = re.compile( 150 | r""" 151 | (?P<cnamespace>[A-Za-z0-9]+) # collection namespace 152 | \. # dot 153 | (?P<cname>[A-Za-z0-9]+) # collection name 154 | (?P<specifier>[^A-Za-z0-9].*)? # optional specifier 155 | """, 156 | re.VERBOSE, 157 | ) 158 | matched = non_local_re.match(string) 159 | if not matched: 160 | msg = "Specify a valid collection name (ns.n) with an optional version specifier" 161 | output.hint(msg) 162 | msg = f"Failed to parse collection request: {string}" 163 | output.critical(msg) 164 | raise SystemExit(1) # pragma: no cover # (critical is a sys.exit) 165 | msg = f"Found non-local collection request: {string}" 166 | output.debug(msg) 167 | 168 | cnamespace = matched.group("cnamespace") 169 | msg = f"Setting collection namespace: {cnamespace}" 170 | output.debug(msg) 171 | 172 | cname = matched.group("cname") 173 | msg = f"Setting collection name: {cname}" 174 | output.debug(msg) 175 | 176 | if matched.group("specifier"): 177 | specifier = matched.group("specifier") 178 | msg = f"Setting collection specifier: {specifier}" 179 | output.debug(msg) 180 | else: 181 | specifier = "" 182 | msg = "Setting collection specifier as empty" 183 | output.debug(msg) 184 | 185 | local = False 186 | msg = "Setting request as non-local" 187 | output.debug(msg) 188 | 189 | return Collection( 190 | config=config, 191 | path=Path(), 192 | opt_deps="", 193 | local=local, 194 | cnamespace=cnamespace, 195 | cname=cname, 196 | csource=[], 197 | specifier=specifier, 198 | original=string, 199 | ) 200 | 201 | 202 | def get_galaxy(collection: Collection, output: Output) -> None: 203 | """Retrieve the collection name from the galaxy.yml file.
204 | 205 | Args: 206 | collection: A collection object 207 | output: The output object 208 | Raises: 209 | SystemExit: If the collection name is not found 210 | """ 211 | file_name = collection.path / "galaxy.yml" 212 | if not file_name.exists(): 213 | err = f"Failed to find {file_name} in {collection.path}" 214 | output.critical(err) 215 | 216 | with file_name.open(encoding="utf-8") as fileh: 217 | try: 218 | yaml_file = yaml.safe_load(fileh) 219 | except yaml.YAMLError as exc: 220 | err = f"Failed to load yaml file: {exc}" 221 | output.critical(err) 222 | 223 | try: 224 | collection.cnamespace = yaml_file["namespace"] 225 | collection.cname = yaml_file["name"] 226 | msg = f"Found collection name: {collection.name} from {file_name}." 227 | output.debug(msg) 228 | except KeyError as exc: 229 | err = f"Failed to find collection name in {file_name}: {exc}" 230 | output.critical(err) 231 | else: 232 | return 233 | raise SystemExit(1) # pragma: no cover # (critical is a sys.exit) 234 | -------------------------------------------------------------------------------- /src/ansible_dev_environment/config.py: -------------------------------------------------------------------------------- 1 | """Constants, for now, for ansible-dev-environment.""" 2 | 3 | from __future__ import annotations 4 | 5 | import json 6 | import shutil 7 | import subprocess 8 | import sys 9 | 10 | from functools import cached_property 11 | from pathlib import Path 12 | from typing import TYPE_CHECKING 13 | 14 | from .utils import subprocess_run 15 | 16 | 17 | if TYPE_CHECKING: 18 | from argparse import Namespace 19 | 20 | from .output import Output 21 | from .utils import TermFeatures 22 | 23 | 24 | class Config: # pylint: disable=too-many-instance-attributes 25 | """The application configuration.""" 26 | 27 | def __init__( 28 | self, 29 | args: Namespace, 30 | output: Output, 31 | term_features: TermFeatures, 32 | ) -> None: 33 | """Initialize the configuration. 34 | 35 | Args: 36 | args: The command line arguments 37 | output: The output object 38 | term_features: The terminal features 39 | """ 40 | self._create_venv: bool = False 41 | self.args: Namespace = args 42 | self.bindir: Path 43 | self._output: Output = output 44 | self.python_path: Path 45 | self.site_pkg_path: Path 46 | self.specified_python: str | Path | None = None 47 | self.venv_interpreter: Path 48 | self.term_features: TermFeatures = term_features 49 | 50 | def init(self) -> None: 51 | """Initialize the configuration.""" 52 | if self.args.subcommand == "install": 53 | self._create_venv = True 54 | 55 | self._set_interpreter() 56 | self._set_site_pkg_path() 57 | 58 | @property 59 | def cache_dir(self) -> Path: 60 | """Return the cache directory.""" 61 | cache_dir = self.venv / ".ansible-dev-environment" 62 | if not cache_dir.exists(): 63 | cache_dir.mkdir(parents=True) 64 | return cache_dir 65 | 66 | @property 67 | def venv(self) -> Path: 68 | """Return the virtual environment path.""" 69 | return Path(self.args.venv).expanduser().resolve() 70 | 71 | @property 72 | def venv_cache_dir(self) -> Path: 73 | """Return the virtual environment cache directory.""" 74 | return self.cache_dir 75 | 76 | @property 77 | def venv_pip_install_cmd(self) -> str: 78 | """Return the pip command for the virtual environment. 79 | 80 | Returns: 81 | The pip install command for the virtual environment. 
82 | """ 83 | if self.uv_available: 84 | return f"uv pip install --python {self.venv_interpreter}" 85 | return f"{self.venv}/bin/python -m pip install" 86 | 87 | @cached_property 88 | def uv_available(self) -> bool: 89 | """Return whether to use uv commands like venv or pip. 90 | 91 | Returns: 92 | True if uv is to be used. 93 | """ 94 | if self.args.uv is False: 95 | self._output.debug("uv is disabled.") 96 | return False 97 | 98 | if not (uv_path := shutil.which("uv")): 99 | self._output.debug("uv is not available in the environment.") 100 | return False 101 | 102 | self._output.debug(f"uv is available at {uv_path}") 103 | self._output.info( 104 | "uv is available and will be used instead of venv/pip. Disable with 'ADE_UV=0' or '--uv false'.", 105 | ) 106 | return True 107 | 108 | @property 109 | def discovered_python_reqs(self) -> Path: 110 | """Return the discovered python requirements file.""" 111 | return self.venv_cache_dir / "discovered_requirements.txt" 112 | 113 | @property 114 | def discovered_bindep_reqs(self) -> Path: 115 | """Return the discovered system package requirements file.""" 116 | return self.venv_cache_dir / "discovered_bindep.txt" 117 | 118 | @property 119 | def site_pkg_collections_path(self) -> Path: 120 | """Return the site packages collection path.""" 121 | site_pkg_collections_path = self.site_pkg_path / "ansible_collections" 122 | if not site_pkg_collections_path.exists(): 123 | site_pkg_collections_path.mkdir() 124 | return site_pkg_collections_path 125 | 126 | @property 127 | def venv_bindir(self) -> Path: 128 | """Return the virtual environment bin directory.""" 129 | return self.venv / "bin" 130 | 131 | @property 132 | def interpreter(self) -> Path: 133 | """Return the current interpreter.""" 134 | return Path(sys.executable) 135 | 136 | @property 137 | def galaxy_bin(self) -> Path: 138 | """Find the ansible galaxy command. 139 | 140 | Prefer the venv over the system package over the PATH. 141 | 142 | Raises: 143 | SystemExit: If the command cannot be found. 144 | """ 145 | within_venv = self.venv_bindir / "ansible-galaxy" 146 | if within_venv.exists(): 147 | msg = f"Found ansible-galaxy in virtual environment: {within_venv}" 148 | self._output.debug(msg) 149 | return within_venv 150 | system_pkg = self.site_pkg_path / "bin" / "ansible-galaxy" 151 | if system_pkg.exists(): 152 | msg = f"Found ansible-galaxy in system packages: {system_pkg}" 153 | self._output.debug(msg) 154 | return system_pkg 155 | last_resort = shutil.which("ansible-galaxy") 156 | if last_resort: 157 | msg = f"Found ansible-galaxy in PATH: {last_resort}" 158 | self._output.debug(msg) 159 | return Path(last_resort) 160 | msg = "Failed to find ansible-galaxy." 161 | self._output.critical(msg) 162 | raise SystemExit(1) # pragma: no cover # critical exits 163 | 164 | def _set_interpreter( 165 | self, 166 | ) -> None: 167 | """Set the interpreter.""" 168 | self._locate_python() 169 | if self.specified_python is None: 170 | venv_cmd = ( 171 | "uv venv --seed --python-preference=system" 172 | if self.uv_available 173 | else f"{sys.executable} -m venv" 174 | ) 175 | else: 176 | venv_cmd = ( 177 | f"uv venv --seed --python-preference=system --python {self.specified_python}" 178 | if self.uv_available 179 | else f"{self.specified_python} -m venv" 180 | ) 181 | 182 | if self.venv.exists() and self.specified_python: 183 | err = "User specified python cannot be used with an existing virtual environment." 
184 | self._output.critical(err) 185 | return # pragma: no cover # critical exits 186 | 187 | if not self.venv.exists() and not self._create_venv: 188 | err = f"Cannot find virtual environment: {self.venv}." 189 | self._output.critical(err) 190 | return # pragma: no cover # critical exits 191 | 192 | if not self.venv.exists(): 193 | msg = f"Creating virtual environment: {self.venv}" 194 | command = f"{venv_cmd} {self.venv}" 195 | if self.args.system_site_packages: 196 | command = f"{command} --system-site-packages" 197 | msg += " with system site packages" 198 | self._output.debug(msg) 199 | try: 200 | subprocess_run( 201 | command=command, 202 | verbose=self.args.verbose, 203 | msg=msg, 204 | output=self._output, 205 | ) 206 | msg = f"Created virtual environment: {self.venv}" 207 | if self.specified_python: 208 | msg += f" using {self.specified_python}" 209 | self._output.note(msg) 210 | except subprocess.CalledProcessError as exc: 211 | err = f"Failed to create virtual environment: {exc.stdout} {exc.stderr}" 212 | self._output.critical(err) 213 | return # pragma: no cover # critical exits 214 | 215 | msg = f"Virtual environment: {self.venv}" 216 | self._output.debug(msg) 217 | venv_interpreter = self.venv / "bin" / "python" 218 | if not venv_interpreter.exists(): 219 | err = f"Cannot find interpreter: {venv_interpreter}." 220 | self._output.critical(err) 221 | 222 | msg = f"Virtual environment interpreter: {venv_interpreter}" 223 | self._output.debug(msg) 224 | self.venv_interpreter = venv_interpreter 225 | 226 | def _set_site_pkg_path(self) -> None: 227 | """Use the interpreter to find the site packages path.""" 228 | command = ( 229 | f"{self.venv_interpreter} -c" 230 | "'import json,sysconfig; print(json.dumps(sysconfig.get_paths()))'" 231 | ) 232 | work = "Locating site packages directory" 233 | try: 234 | proc = subprocess_run( 235 | command=command, 236 | verbose=self.args.verbose, 237 | msg=work, 238 | output=self._output, 239 | ) 240 | except subprocess.CalledProcessError as exc: 241 | err = f"Failed to find site packages path: {exc}" 242 | self._output.critical(err) 243 | 244 | try: 245 | sysconfig_paths = json.loads(proc.stdout) 246 | except json.JSONDecodeError as exc: 247 | err = f"Failed to decode json: {exc}" 248 | self._output.critical(err) 249 | 250 | if not sysconfig_paths: 251 | err = "Failed to find site packages path." 252 | self._output.critical(err) 253 | 254 | purelib = sysconfig_paths.get("purelib") 255 | if not purelib: 256 | err = "Failed to find purelib in sysconfig paths." 257 | self._output.critical(err) 258 | 259 | self.site_pkg_path = Path(purelib) 260 | msg = f"Found site packages path: {self.site_pkg_path}" 261 | self._output.debug(msg) 262 | 263 | def _locate_python(self) -> None: 264 | """Locate the python interpreter. 
265 | 266 | 1) If not user provided default to system 267 | 2) If it is a path and exists, use that 268 | 3) If it starts with python and uv, use that 269 | 4) If it starts with python and pip, use that 270 | 5) If it is a version and uv, use that 271 | 6) If it is a version and pip, and found, use that 272 | 273 | """ 274 | python_arg = getattr(self.args, "python", None) 275 | if not python_arg: 276 | return 277 | 278 | if Path(python_arg).exists(): 279 | self.specified_python = Path(python_arg).expanduser().resolve() 280 | elif python_arg.lower().startswith("python"): 281 | if self.uv_available: 282 | self.specified_python = python_arg 283 | elif path := shutil.which(python_arg): 284 | self.specified_python = path 285 | else: 286 | msg = f"Cannot find specified python interpreter. ({python_arg})" 287 | self._output.critical(msg) 288 | return # pragma: no cover # critical exits 289 | else: 290 | possible = f"python{python_arg}" 291 | if self.uv_available: 292 | self.specified_python = possible 293 | elif path := shutil.which(possible): 294 | self.specified_python = path 295 | else: 296 | msg = f"Cannot find specified python interpreter. ({possible})" 297 | self._output.critical(msg) 298 | return # pragma: no cover # critical exits 299 | self._output.debug( 300 | f"Using specified python interpreter: {self.specified_python}", 301 | ) 302 | return 303 | -------------------------------------------------------------------------------- /src/ansible_dev_environment/definitions.py: -------------------------------------------------------------------------------- 1 | """Some common definitions.""" 2 | 3 | from __future__ import annotations 4 | 5 | from configparser import ConfigParser 6 | from dataclasses import dataclass 7 | from typing import TYPE_CHECKING 8 | 9 | 10 | if TYPE_CHECKING: 11 | from pathlib import Path 12 | 13 | COLLECTIONS_PATH = "collections_path = ." 14 | 15 | 16 | @dataclass 17 | class AnsibleCfg: 18 | """ansible.cfg file abstraction. 19 | 20 | Attributes: 21 | path: Path to the ansible.cfg file. 22 | """ 23 | 24 | path: Path 25 | 26 | @property 27 | def exists(self) -> bool: 28 | """Check if the ansible.cfg file exists.""" 29 | return self.path.exists() 30 | 31 | @property 32 | def collections_path_is_dot(self) -> bool: 33 | """Check if the collection path is a dot. 34 | 35 | Returns: 36 | bool: True if the collection path is a dot. 37 | """ 38 | config = ConfigParser() 39 | config.read(self.path) 40 | return config.get("defaults", "collections_path", fallback=None) == "." 41 | 42 | def set_or_update_collections_path(self) -> None: 43 | """Set or update the collection path in the ansible.cfg file. 44 | 45 | The configparser doesn't preserve comments, so we need to read the file 46 | and write it back with the new collection path. 
47 | """ 48 | contents = self.path.read_text().splitlines() 49 | 50 | if "[defaults]" not in contents: 51 | contents.insert(0, "[defaults]") 52 | 53 | idx = [i for i, line in enumerate(contents) if line.startswith("collections_path")] 54 | 55 | if idx: 56 | contents[idx[0]] = COLLECTIONS_PATH 57 | else: 58 | insert_at = contents.index("[defaults]") + 1 59 | contents.insert(insert_at, COLLECTIONS_PATH) 60 | 61 | with self.path.open(mode="w") as file: 62 | file.write("\n".join(contents) + "\n") 63 | 64 | def author_new(self) -> None: 65 | """Author the file and update it.""" 66 | contents = ["[defaults]", COLLECTIONS_PATH] 67 | with self.path.open(mode="w") as file: 68 | file.write("\n".join(contents) + "\n") 69 | -------------------------------------------------------------------------------- /src/ansible_dev_environment/output.py: -------------------------------------------------------------------------------- 1 | """Output functionality.""" 2 | 3 | from __future__ import annotations 4 | 5 | import decimal 6 | import logging 7 | import shutil 8 | import sys 9 | import textwrap 10 | 11 | from dataclasses import dataclass 12 | from enum import Enum 13 | from pathlib import Path 14 | from typing import TYPE_CHECKING 15 | 16 | 17 | if TYPE_CHECKING: 18 | from .utils import TermFeatures 19 | 20 | 21 | GOLDEN_RATIO = 1.61803398875 22 | 23 | 24 | def round_half_up(number: float) -> int: 25 | """Round a number to the nearest integer with ties going away from zero. 26 | 27 | This is different the round() where exact halfway cases are rounded to the nearest 28 | even result instead of away from zero. (e.g. round(2.5) = 2, round(3.5) = 4). 29 | 30 | This will always round based on distance from zero. (e.g round(2.5) = 3, round(3.5) = 4). 31 | 32 | Args: 33 | number: The number to round 34 | Returns: 35 | The rounded number as an it 36 | """ 37 | rounded = decimal.Decimal(number).quantize( 38 | decimal.Decimal("1"), 39 | rounding=decimal.ROUND_HALF_UP, 40 | ) 41 | return int(rounded) 42 | 43 | 44 | def console_width() -> int: 45 | """Get a console width based on common screen widths. 46 | 47 | Returns: 48 | The console width 49 | """ 50 | medium = 80 51 | wide = 132 52 | width = shutil.get_terminal_size().columns 53 | if width <= medium: 54 | return width 55 | if width <= wide: 56 | return max(80, round_half_up(width / GOLDEN_RATIO)) 57 | return wide 58 | 59 | 60 | class Color: 61 | """Color constants.""" 62 | 63 | BLACK = "\033[30m" 64 | RED = "\033[31m" 65 | GREEN = "\033[32m" 66 | YELLOW = "\033[33m" 67 | BLUE = "\033[34m" 68 | MAGENTA = "\033[35m" 69 | CYAN = "\033[36m" 70 | WHITE = "\033[37m" 71 | GREY = "\033[90m" # Bright black? 72 | BRIGHT_RED = "\033[91m" 73 | BRIGHT_GREEN = "\033[92m" 74 | BRIGHT_YELLOW = "\033[93m" 75 | BRIGHT_BLUE = "\033[94m" 76 | BRIGHT_MAGENTA = "\033[95m" 77 | BRIGHT_CYAN = "\033[96m" 78 | BRIGHT_WHITE = "\033[97m" 79 | END = "\033[0m" 80 | 81 | 82 | class Level(Enum): 83 | """An exit message prefix.""" 84 | 85 | CRITICAL = "Critical" 86 | DEBUG = "Debug" 87 | ERROR = "Error" 88 | HINT = "Hint" 89 | INFO = "Info" 90 | NOTE = "Note" 91 | WARNING = "Warning" 92 | 93 | @property 94 | def log_level(self) -> int: 95 | """Return a log level. 
96 | 97 | :returns: The log level 98 | """ 99 | mapping = { 100 | Level.CRITICAL: logging.CRITICAL, 101 | Level.DEBUG: logging.DEBUG, 102 | Level.ERROR: logging.ERROR, 103 | Level.HINT: logging.INFO, 104 | Level.INFO: logging.INFO, 105 | Level.NOTE: logging.INFO, 106 | Level.WARNING: logging.WARNING, 107 | } 108 | return mapping[self] 109 | 110 | @classmethod 111 | def _longest_name(cls) -> int: 112 | """Return the longest exit message prefix. 113 | 114 | Returns: 115 | The longest exit message prefix 116 | """ 117 | return max(len(member.value) for member in cls) 118 | 119 | @classmethod 120 | def longest_formatted(cls) -> int: 121 | """Return the longest exit message prefix. 122 | 123 | Returns: 124 | The longest exit message prefix 125 | """ 126 | return max(len(str(member)) for member in cls) 127 | 128 | def __str__(self) -> str: 129 | """Return the exit message prefix as a string. 130 | 131 | Returns: 132 | The exit message prefix as a string 133 | """ 134 | return f"{' ' * (self._longest_name() - len(self.name))}{self.name.capitalize()}: " 135 | 136 | def simple(self) -> str: 137 | """Simple formatting for level name. 138 | 139 | Returns: 140 | The level name 141 | """ 142 | return f"{self.name.capitalize()}: " 143 | 144 | 145 | @dataclass 146 | class Msg: 147 | """An object to hold a message to present when exiting.""" 148 | 149 | #: The message that will be presented 150 | message: str 151 | #: The prefix for the message, used for formatting 152 | prefix: Level = Level.ERROR 153 | 154 | @property 155 | def color(self) -> str: 156 | """Return a color for the prefix. 157 | 158 | :returns: The color for the prefix 159 | """ 160 | color_mapping = { 161 | Level.CRITICAL: Color.BRIGHT_RED, 162 | Level.DEBUG: Color.GREY, 163 | Level.ERROR: Color.RED, 164 | Level.HINT: Color.CYAN, 165 | Level.INFO: Color.MAGENTA, 166 | Level.NOTE: Color.GREEN, 167 | Level.WARNING: Color.YELLOW, 168 | } 169 | return color_mapping[self.prefix] 170 | 171 | def to_lines( 172 | self, 173 | color: bool, # noqa: FBT001 174 | width: int, 175 | with_prefix: bool, # noqa: FBT001 176 | ) -> list[str]: 177 | """Output exit message to the console. 
178 | 179 | Args: 180 | color: Whether to color the message 181 | width: Constrain message to width 182 | with_prefix: Whether to prefix the message 183 | Returns: 184 | The exit message as a string 185 | """ 186 | prefix_length = Level.longest_formatted() 187 | indent = " " * prefix_length 188 | 189 | lines = [] 190 | message_lines = self.message.splitlines() 191 | 192 | # --no-ansi, will set color to false, so assume minimal formatting 193 | if not color: 194 | return [self.prefix.simple() + message_lines[0], *message_lines[1:]] 195 | 196 | lines.extend( 197 | textwrap.fill( 198 | message_lines[0], 199 | width=width, 200 | break_on_hyphens=False, 201 | initial_indent=str(self.prefix) if with_prefix else indent, 202 | subsequent_indent=indent, 203 | ).splitlines(), 204 | ) 205 | 206 | if len(message_lines) > 1: 207 | for line in message_lines[1:]: 208 | lines.extend( 209 | textwrap.fill( 210 | line, 211 | width=width, 212 | break_on_hyphens=False, 213 | initial_indent=indent, 214 | subsequent_indent=indent, 215 | ).splitlines(), 216 | ) 217 | 218 | start_color = self.color if color else "" 219 | end_color = Color.END if color else "" 220 | 221 | return [f"{start_color}{line}{end_color}" for line in lines] 222 | 223 | 224 | class Output: 225 | """Output functionality.""" 226 | 227 | def __init__( # pylint: disable=too-many-positional-arguments 228 | self, 229 | log_file: str, 230 | log_level: str, 231 | log_append: str, 232 | term_features: TermFeatures, 233 | verbosity: int, 234 | ) -> None: 235 | """Initialize the output object. 236 | 237 | Args: 238 | log_file: The path to the los.get_terminal_size()og file 239 | log_level: The log level 240 | log_append: Whether to append to the log file 241 | term_features: Terminal features 242 | verbosity: The verbosity level 243 | """ 244 | self._verbosity = verbosity 245 | self.call_count: dict[str, int] = { 246 | "critical": 0, 247 | "debug": 0, 248 | "error": 0, 249 | "hint": 0, 250 | "info": 0, 251 | "note": 0, 252 | "warning": 0, 253 | } 254 | self.term_features = term_features 255 | self.logger = logging.getLogger("ansible_dev_environment") 256 | if log_level != "notset": 257 | self.logger.setLevel(log_level.upper()) 258 | self.log_to_file = bool(log_file) 259 | log_file_path = Path(log_file) 260 | if log_file_path.exists() and log_append == "false": 261 | log_file_path.unlink() 262 | formatter = logging.Formatter( 263 | fmt="%(asctime)s %(levelname)s '%(name)s.%(module)s.%(funcName)s' %(message)s", 264 | ) 265 | handler = logging.FileHandler(log_file) 266 | handler.setFormatter(formatter) 267 | handler.setLevel(log_level.upper()) 268 | self.logger.addHandler(handler) 269 | self.log_to_file = True 270 | else: 271 | self.log_to_file = False 272 | 273 | def critical(self, msg: str) -> None: 274 | """Print a critical message to the console. 275 | 276 | Args: 277 | msg: The message to print 278 | """ 279 | self.call_count["critical"] += 1 280 | self.log(msg, level=Level.CRITICAL) 281 | sys.exit(1) 282 | 283 | def debug(self, msg: str) -> None: 284 | """Print a debug message to the console. 285 | 286 | Args: 287 | msg: The message to print 288 | """ 289 | self.call_count["debug"] += 1 290 | self.log(msg, level=Level.DEBUG) 291 | 292 | def error(self, msg: str) -> None: 293 | """Print an error message to the console. 294 | 295 | Args: 296 | msg: The message to print 297 | """ 298 | self.call_count["error"] += 1 299 | self.log(msg, level=Level.ERROR) 300 | 301 | def hint(self, msg: str) -> None: 302 | """Print a hint message to the console. 
303 | 304 | Args: 305 | msg: The message to print 306 | """ 307 | self.call_count["hint"] += 1 308 | self.log(msg, level=Level.HINT) 309 | 310 | def info(self, msg: str) -> None: 311 | """Print a hint message to the console. 312 | 313 | Args: 314 | msg: The message to print 315 | """ 316 | self.call_count["info"] += 1 317 | self.log(msg, level=Level.INFO) 318 | 319 | def note(self, msg: str) -> None: 320 | """Print a note message to the console. 321 | 322 | Args: 323 | msg: The message to print 324 | """ 325 | self.call_count["note"] += 1 326 | self.log(msg, level=Level.NOTE) 327 | 328 | def warning(self, msg: str) -> None: 329 | """Print a warning message to the console. 330 | 331 | Args: 332 | msg: The message to print 333 | """ 334 | self.call_count["warning"] += 1 335 | self.log(msg, level=Level.WARNING) 336 | 337 | def log(self, msg: str, level: Level = Level.ERROR) -> None: 338 | """Print a message to the console. 339 | 340 | Args: 341 | msg: The message to print 342 | level: The message level 343 | """ 344 | if self.log_to_file: 345 | self.logger.log(level.log_level, msg, stacklevel=3) 346 | 347 | set_width = console_width() 348 | 349 | debug = 2 350 | info = 1 351 | if (self._verbosity < debug and level == Level.DEBUG) or ( 352 | self._verbosity < info and level == Level.INFO 353 | ): 354 | return 355 | 356 | lines = Msg(message=msg, prefix=level).to_lines( 357 | color=self.term_features.color, 358 | width=set_width, 359 | with_prefix=True, 360 | ) 361 | if level in (Level.CRITICAL, Level.ERROR): 362 | print("\n".join(lines), file=sys.stderr) # noqa: T201 363 | else: 364 | print("\n".join(lines)) # noqa: T201 365 | -------------------------------------------------------------------------------- /src/ansible_dev_environment/subcommands/__init__.py: -------------------------------------------------------------------------------- 1 | """The subcommands module contains all the subcommands for ansible-dev-environment.""" 2 | 3 | # ruff: noqa: F401 4 | from __future__ import annotations 5 | 6 | from .checker import Checker as Check 7 | from .inspector import Inspector as Inspect 8 | from .installer import Installer as Install 9 | from .lister import Lister as List 10 | from .treemaker import TreeMaker as Tree 11 | from .uninstaller import UnInstaller as Uninstall 12 | -------------------------------------------------------------------------------- /src/ansible_dev_environment/subcommands/checker.py: -------------------------------------------------------------------------------- 1 | """The dependency checker.""" 2 | 3 | from __future__ import annotations 4 | 5 | import json 6 | import subprocess 7 | import sys 8 | 9 | from typing import TYPE_CHECKING 10 | 11 | from packaging.specifiers import InvalidSpecifier, SpecifierSet 12 | from packaging.version import Version 13 | 14 | from ansible_dev_environment.utils import ( 15 | builder_introspect, 16 | collect_manifests, 17 | oxford_join, 18 | subprocess_run, 19 | ) 20 | 21 | 22 | if TYPE_CHECKING: 23 | from ansible_dev_environment.config import Config 24 | from ansible_dev_environment.output import Output 25 | 26 | 27 | class Checker: 28 | """The dependency checker.""" 29 | 30 | def __init__(self, config: Config, output: Output) -> None: 31 | """Initialize the checker. 32 | 33 | Args: 34 | config: The application configuration. 35 | output: The application output object. 
36 | """ 37 | self._config: Config = config 38 | self._collections_missing: bool 39 | self._output: Output = output 40 | self._system_dep_missing: bool 41 | 42 | def run(self) -> None: 43 | """Run the checker.""" 44 | builder_introspect(config=self._config, output=self._output) 45 | self._collection_deps() 46 | self.system_deps() 47 | self._python_deps() 48 | 49 | def _collection_deps(self) -> None: # noqa: C901, PLR0912, PLR0915 50 | """Check collection dependencies.""" 51 | collections = collect_manifests( 52 | target=self._config.site_pkg_collections_path, 53 | venv_cache_dir=self._config.venv_cache_dir, 54 | ) 55 | missing = False 56 | for collection_name, details in collections.items(): 57 | error = "Collection {collection_name} has malformed metadata." 58 | if not isinstance(details, dict): 59 | self._output.error(error) 60 | continue 61 | if not isinstance(details["collection_info"], dict): 62 | self._output.error(error) 63 | continue 64 | if not isinstance(details["collection_info"]["dependencies"], dict): 65 | self._output.error(error) 66 | continue 67 | 68 | msg = f"Checking dependencies for {collection_name}." 69 | self._output.debug(msg) 70 | 71 | deps = details["collection_info"]["dependencies"] 72 | 73 | if not deps: 74 | msg = f"Collection {collection_name} has no dependencies." 75 | self._output.debug(msg) 76 | continue 77 | for dep, version in deps.items(): 78 | if not isinstance(version, str): 79 | err = ( 80 | f"Collection {collection_name} has malformed dependency version for {dep}." 81 | ) 82 | self._output.error(err) 83 | continue 84 | try: 85 | spec = SpecifierSet(version) 86 | except InvalidSpecifier: 87 | spec = SpecifierSet(">=0.0.0") 88 | msg = f"Invalid version specifier {version}, assuming >=0.0.0." 89 | self._output.debug(msg) 90 | if dep in collections: 91 | dependency = collections[dep] 92 | error = "Collection {dep} has malformed metadata." 93 | if not isinstance(dependency, dict): 94 | self._output.error(error) 95 | continue 96 | if not isinstance(dependency["collection_info"], dict): 97 | self._output.error(error) 98 | continue 99 | 100 | dep_version = dependency["collection_info"]["version"] 101 | if not isinstance(dep_version, str): 102 | self._output.error(error) 103 | continue 104 | dep_spec = Version(dep_version) 105 | if not spec.contains(dep_spec): 106 | err = ( 107 | f"Collection {collection_name} requires {dep} {version}" 108 | f" but {dep} {dep_version} is installed." 109 | ) 110 | self._output.error(err) 111 | missing = True 112 | 113 | else: 114 | msg = ( 115 | f"Collection {collection_name} requires {dep} {version}" 116 | f" and {dep} {dep_version} is installed." 117 | ) 118 | self._output.debug(msg) 119 | else: 120 | err = ( 121 | f"Collection {collection_name} requires" 122 | f" {dep} {version} but it is not installed." 123 | ) 124 | self._output.error(err) 125 | msg = f"Try running `ade install {dep}`" 126 | self._output.hint(msg) 127 | missing = True 128 | 129 | if not missing: 130 | msg = "All dependant collections are installed." 131 | self._output.note(msg) 132 | self._collections_missing = missing 133 | 134 | def _python_deps(self) -> None: 135 | """Check Python dependencies.""" 136 | if self._system_dep_missing: 137 | msg = "System packages are missing. Python dependency checking may fail." 138 | self._output.warning(msg) 139 | msg = "Install system packages and re-run `ade check`." 
140 | self._output.hint(msg) 141 | missing_file = self._config.venv_cache_dir / "pip-report.txt" 142 | command = ( 143 | f"{self._config.venv_interpreter} -m pip install -r" 144 | f" {self._config.discovered_python_reqs} --dry-run" 145 | f" --report {missing_file}" 146 | ) 147 | work = "Building python package dependency tree" 148 | 149 | try: 150 | subprocess_run( 151 | command=command, 152 | verbose=self._config.args.verbose, 153 | msg=work, 154 | output=self._output, 155 | ) 156 | except subprocess.CalledProcessError as exc: 157 | err = f"Failed to check python dependencies: {exc}" 158 | self._output.critical(err) 159 | with missing_file.open() as file: 160 | pip_report = json.load(file) 161 | 162 | if self._collections_missing: 163 | msg = "Python packages required by missing collections are not included." 164 | self._output.note(msg) 165 | 166 | if "install" not in pip_report or not pip_report["install"]: 167 | if not self._collections_missing: 168 | msg = "All python dependencies are installed." 169 | self._output.note(msg) 170 | return 171 | 172 | missing = [ 173 | f"{package['metadata']['name']}=={package['metadata']['version']}" 174 | for package in pip_report["install"] 175 | ] 176 | 177 | err = f"Missing python dependencies: {oxford_join(missing)}" 178 | self._output.error(err) 179 | msg = f"Try running `pip install {' '.join(missing)}`." 180 | self._output.hint(msg) 181 | 182 | def system_deps(self) -> None: 183 | """Check the bindep file.""" 184 | msg = "Checking system packages." 185 | self._output.info(msg) 186 | 187 | command = f"{sys.executable} -m bindep -b -f {self._config.discovered_bindep_reqs}" 188 | work = "Checking system package requirements" 189 | try: 190 | subprocess_run( 191 | command=command, 192 | verbose=self._config.args.verbose, 193 | msg=work, 194 | output=self._output, 195 | ) 196 | except subprocess.CalledProcessError as exc: 197 | if exc.stderr: 198 | msg = f"Bindep failed to find required system packages. {exc.stderr}" 199 | self._output.error(msg) 200 | msg = "Check the format of the bindep.txt file." 201 | self._output.hint(msg) 202 | self._system_dep_missing = True 203 | return 204 | lines = exc.stdout.splitlines() 205 | msg = ( 206 | "Required system packages are missing." 207 | " Please use the system package manager to install them." 208 | "\n- " 209 | ) 210 | msg += "\n- ".join(lines) 211 | self._output.warning(msg) 212 | self._system_dep_missing = True 213 | else: 214 | msg = "All required system packages are installed." 215 | self._output.note(msg) 216 | self._system_dep_missing = False 217 | return 218 | -------------------------------------------------------------------------------- /src/ansible_dev_environment/subcommands/inspector.py: -------------------------------------------------------------------------------- 1 | """The collection inspect command.""" 2 | 3 | from __future__ import annotations 4 | 5 | import json 6 | 7 | from typing import TYPE_CHECKING 8 | 9 | from ansible_dev_environment.utils import collect_manifests 10 | 11 | 12 | if TYPE_CHECKING: 13 | from ansible_dev_environment.config import Config 14 | from ansible_dev_environment.output import Output 15 | 16 | try: 17 | from pip._vendor.rich import print_json 18 | 19 | HAS_RICH = True 20 | except ImportError: 21 | HAS_RICH = False 22 | 23 | 24 | class Inspector: 25 | """The Inspector class.""" 26 | 27 | def __init__(self, config: Config, output: Output) -> None: 28 | """Initialize the Inspector. 29 | 30 | Args: 31 | config: The application configuration. 
32 | output: The application output object. 33 | """ 34 | self._config = config 35 | self._output = output 36 | 37 | def run(self) -> None: 38 | """Run the Inspector.""" 39 | collections = collect_manifests( 40 | target=self._config.site_pkg_collections_path, 41 | venv_cache_dir=self._config.venv_cache_dir, 42 | ) 43 | 44 | output = json.dumps(collections, indent=4, sort_keys=True) 45 | if HAS_RICH and self._config.term_features.color: 46 | print_json(output) 47 | else: 48 | print(output) # noqa: T201 49 | -------------------------------------------------------------------------------- /src/ansible_dev_environment/subcommands/lister.py: -------------------------------------------------------------------------------- 1 | """Lister module for ansible-dev-environment.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TYPE_CHECKING 6 | 7 | from ansible_dev_environment.utils import collect_manifests, term_link 8 | 9 | 10 | if TYPE_CHECKING: 11 | from ansible_dev_environment.config import Config 12 | from ansible_dev_environment.output import Output 13 | 14 | 15 | class Lister: 16 | """The Lister class.""" 17 | 18 | def __init__(self, config: Config, output: Output) -> None: 19 | """Initialize the Lister. 20 | 21 | Args: 22 | config: The application configuration. 23 | output: The application output object. 24 | """ 25 | self._config = config 26 | self._output = output 27 | 28 | def run(self) -> None: 29 | """Run the Lister.""" 30 | collections = collect_manifests( 31 | target=self._config.site_pkg_collections_path, 32 | venv_cache_dir=self._config.venv_cache_dir, 33 | ) 34 | 35 | column1_width = 30 36 | column2_width = 10 37 | column3_width = 25 38 | 39 | print( # noqa: T201 40 | f"{'Collection': <{column1_width}}" 41 | f" {'Version': <{column2_width}}" 42 | f" {'Editable project location': <{column3_width}}", 43 | ) 44 | print( # noqa: T201 45 | f"{'-' * (column1_width)} {'-' * (column2_width)} {'-' * (column3_width)}", 46 | ) 47 | 48 | for fqcn, collection in collections.items(): 49 | err = f"Collection {fqcn} has malformed metadata." 
50 | ci = collection["collection_info"] 51 | if not isinstance(ci, dict): 52 | self._output.error(err) 53 | continue 54 | collection_name = ci["name"] 55 | collection_namespace = ci["namespace"] 56 | collection_version = ci["version"] 57 | if not isinstance(collection_name, str): 58 | self._output.error(err) 59 | continue 60 | if not isinstance(collection_namespace, str): 61 | self._output.error(err) 62 | continue 63 | if not isinstance(collection_version, str): 64 | self._output.error(err) 65 | continue 66 | 67 | collection_path = ( 68 | self._config.site_pkg_collections_path / collection_namespace / collection_name 69 | ) 70 | if collection_path.is_symlink(): 71 | editable_location = str(collection_path.resolve()) 72 | else: 73 | editable_location = "" 74 | 75 | docs = ci.get("documentation") 76 | homepage = ci.get("homepage") 77 | repository = ci.get("repository") 78 | issues = ci.get("issues") 79 | link = repository or homepage or docs or issues or "https://ansible.com" 80 | if not isinstance(link, str): 81 | self._output.error(err) 82 | link = "https://ansible.com" 83 | fqcn_linked = term_link( 84 | uri=link, 85 | label=fqcn, 86 | term_features=self._config.term_features, 87 | ) 88 | 89 | print( # noqa: T201 90 | fqcn_linked + " " * (column1_width - len(fqcn)), 91 | f"{ci['version']: <{column2_width}}", 92 | f"{editable_location: <{column3_width}}", 93 | ) 94 | -------------------------------------------------------------------------------- /src/ansible_dev_environment/subcommands/treemaker.py: -------------------------------------------------------------------------------- 1 | """Generate a dependency tree.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TYPE_CHECKING, Union, cast 6 | 7 | from ansible_dev_environment.tree import Tree 8 | from ansible_dev_environment.utils import builder_introspect, collect_manifests 9 | 10 | 11 | if TYPE_CHECKING: 12 | from ansible_dev_environment.config import Config 13 | from ansible_dev_environment.output import Output 14 | 15 | ScalarVal = bool | str | float | int | None 16 | JSONVal = ScalarVal | list["JSONVal"] | dict[str, "JSONVal"] 17 | 18 | TreeWithReqs = dict[str, Union[list[str], "TreeWithReqs"]] 19 | TreeWithoutReqs = dict[str, "TreeWithoutReqs"] 20 | 21 | 22 | class TreeMaker: 23 | """Generate a dependency tree.""" 24 | 25 | def __init__(self, config: Config, output: Output) -> None: 26 | """Initialize the object. 27 | 28 | Args: 29 | config: The application configuration. 30 | output: The application output object. 31 | """ 32 | self._config = config 33 | self._output = output 34 | 35 | def run(self) -> None: # noqa: C901, PLR0912, PLR0915 36 | """Run the command.""" 37 | builder_introspect(self._config, self._output) 38 | 39 | with self._config.discovered_python_reqs.open("r") as reqs_file: 40 | python_deps = reqs_file.read().splitlines() 41 | 42 | collections = collect_manifests( 43 | target=self._config.site_pkg_collections_path, 44 | venv_cache_dir=self._config.venv_cache_dir, 45 | ) 46 | tree_dict: TreeWithoutReqs = {c: {} for c in collections} 47 | 48 | links: dict[str, str] = {} 49 | for collection_name, collection in collections.items(): 50 | err = f"Collection {collection_name} has malformed metadata." 
51 | if not isinstance(collection["collection_info"], dict): 52 | self._output.error(err) 53 | continue 54 | if not isinstance(collection["collection_info"]["dependencies"], dict): 55 | self._output.error(err) 56 | continue 57 | 58 | for dep in collection["collection_info"]["dependencies"]: 59 | if not isinstance(dep, str): 60 | err = f"Collection {collection_name} has malformed dependency." 61 | self._output.error(err) 62 | continue 63 | target = tree_dict[collection_name] 64 | target[dep] = tree_dict[dep] 65 | 66 | docs = collection["collection_info"].get("documentation") 67 | homepage = collection["collection_info"].get("homepage") 68 | repository = collection["collection_info"].get("repository") 69 | issues = collection["collection_info"].get("issues") 70 | fallback = "https://ansible.com" 71 | link = repository or homepage or docs or issues or fallback 72 | if not isinstance(link, str): 73 | err = f"Collection {collection_name} has malformed repository metadata." 74 | self._output.error(err) 75 | link = fallback 76 | links[collection_name] = link 77 | 78 | if self._config.args.verbose >= 1: 79 | add_python_reqs( 80 | tree_dict=cast("TreeWithReqs", tree_dict), 81 | collection_name=collection_name, 82 | python_deps=python_deps, 83 | ) 84 | green: list[str] = [] 85 | if self._config.args.verbose >= 1: 86 | green.append("python requirements") 87 | for line in python_deps: 88 | if "#" not in line: 89 | green.append(line.strip()) 90 | green.append(line.split("#", 1)[0].strip()) 91 | 92 | more_verbose = 2 93 | if self._config.args.verbose >= more_verbose: 94 | tree = Tree(obj=cast("JSONVal", tree_dict), term_features=self._config.term_features) 95 | tree.links = links 96 | tree.green.extend(green) 97 | rendered = tree.render() 98 | print(rendered) # noqa: T201 99 | else: 100 | pruned_tree_dict: TreeWithoutReqs = {} 101 | for collection_name in tree_dict: 102 | found = False 103 | for value in tree_dict.values(): 104 | if collection_name in value: 105 | found = True 106 | if not found: 107 | pruned_tree_dict[collection_name] = tree_dict[collection_name] 108 | 109 | tree = Tree( 110 | obj=cast("JSONVal", pruned_tree_dict), 111 | term_features=self._config.term_features, 112 | ) 113 | tree.links = links 114 | tree.green.extend(green) 115 | rendered = tree.render() 116 | print(rendered) # noqa: T201 117 | 118 | if self._config.args.verbose >= 1: 119 | msg = "Only direct python dependencies are shown." 120 | self._output.info(msg) 121 | hint = "Run `pip show ` to see indirect dependencies." 122 | self._output.hint(hint) 123 | 124 | 125 | def add_python_reqs( 126 | tree_dict: TreeWithReqs, 127 | collection_name: str, 128 | python_deps: list[str], 129 | ) -> None: 130 | """Add Python dependencies to the tree. 131 | 132 | Args: 133 | tree_dict: The tree dict. 134 | collection_name: The collection name. 135 | python_deps: The Python dependencies. 136 | 137 | Raises: 138 | TypeError: If the tree dict is not a dict. 139 | """ 140 | collection = tree_dict[collection_name] 141 | if not isinstance(collection, dict): 142 | msg = "Did you really name a collection 'python requirements'?" 
143 | raise TypeError(msg) 144 | 145 | deps = [] 146 | for dep in sorted(python_deps): 147 | if "#" in dep: 148 | name, comment = dep.split("#", 1) 149 | else: 150 | name = dep 151 | comment = "" 152 | if collection_name in comment: 153 | deps.append(name.strip()) 154 | 155 | collection["python requirements"] = deps 156 | -------------------------------------------------------------------------------- /src/ansible_dev_environment/subcommands/uninstaller.py: -------------------------------------------------------------------------------- 1 | """The installer.""" 2 | 3 | from __future__ import annotations 4 | 5 | import shutil 6 | 7 | from pathlib import Path 8 | from typing import TYPE_CHECKING 9 | 10 | from ansible_dev_environment.collection import ( 11 | Collection, 12 | parse_collection_request, 13 | ) 14 | from ansible_dev_environment.utils import collections_from_requirements 15 | 16 | 17 | if TYPE_CHECKING: 18 | from ansible_dev_environment.config import Config 19 | from ansible_dev_environment.output import Output 20 | 21 | 22 | class UnInstaller: 23 | """The uninstaller class.""" 24 | 25 | def __init__(self, config: Config, output: Output) -> None: 26 | """Initialize the installer. 27 | 28 | Args: 29 | config: The application configuration. 30 | output: The application output object. 31 | """ 32 | self._config = config 33 | self._output = output 34 | self._collection: Collection 35 | 36 | def run(self) -> None: 37 | """Run the uninstaller.""" 38 | if len(self._config.args.collection_specifier) > 1: 39 | msg = "Only one collection can be uninstalled at a time." 40 | self._output.critical(msg) 41 | 42 | if self._config.args.requirement: 43 | requirements_path = Path(self._config.args.requirement) 44 | if not requirements_path.exists(): 45 | err = f"Failed to find requirements file: {requirements_path}" 46 | self._output.critical(err) 47 | collections = collections_from_requirements(requirements_path) 48 | for collection in collections: 49 | self._collection = parse_collection_request( 50 | string=collection["name"], 51 | config=self._config, 52 | output=self._output, 53 | ) 54 | self._remove_collection() 55 | else: 56 | self._collection = parse_collection_request( 57 | string=self._config.args.collection_specifier[0], 58 | config=self._config, 59 | output=self._output, 60 | ) 61 | self._remove_collection() 62 | 63 | def _remove_collection(self) -> None: 64 | """Remove the collection.""" 65 | msg = f"Checking {self._collection.name} at {self._collection.site_pkg_path}" 66 | self._output.debug(msg) 67 | 68 | if self._collection.site_pkg_path.exists(): 69 | msg = f"Exists: {self._collection.site_pkg_path}" 70 | self._output.debug(msg) 71 | 72 | if self._collection.site_pkg_path.is_symlink(): 73 | self._collection.site_pkg_path.unlink() 74 | else: 75 | shutil.rmtree(self._collection.site_pkg_path) 76 | msg = f"Removed {self._collection.name}" 77 | self._output.note(msg) 78 | else: 79 | err = f"Failed to find {self._collection.name}: {self._collection.site_pkg_path}" 80 | self._output.warning(err) 81 | 82 | for entry in self._config.site_pkg_collections_path.iterdir(): 83 | if all( 84 | ( 85 | entry.is_dir(), 86 | entry.name.startswith(self._collection.name), 87 | entry.suffix == ".info", 88 | ), 89 | ): 90 | shutil.rmtree(entry) 91 | msg = f"Removed {self._collection.name}*.info: {entry}" 92 | self._output.debug(msg) 93 | 94 | collection_namespace_root = self._collection.site_pkg_path.parent 95 | 96 | if collection_namespace_root.exists(): 97 | try: 98 | collection_namespace_root.rmdir() 
99 | msg = f"Removed collection namespace root: {collection_namespace_root}" 100 | self._output.debug(msg) 101 | except OSError as exc: 102 | msg = f"Failed to remove collection namespace root: {exc}" 103 | self._output.debug(msg) 104 | 105 | if self._config.site_pkg_collections_path.exists(): 106 | try: 107 | self._config.site_pkg_collections_path.rmdir() 108 | msg = f"Removed collection root: {self._config.site_pkg_collections_path}" 109 | self._output.debug(msg) 110 | except OSError as exc: 111 | msg = f"Failed to remove collection root: {exc}" 112 | self._output.debug(msg) 113 | -------------------------------------------------------------------------------- /src/ansible_dev_environment/tree.py: -------------------------------------------------------------------------------- 1 | """An ascii tree generator.""" 2 | 3 | from __future__ import annotations 4 | 5 | from .utils import Ansi, TermFeatures, term_link 6 | 7 | 8 | ScalarVal = bool | str | float | int | None 9 | JSONVal = ScalarVal | list["JSONVal"] | dict[str, "JSONVal"] 10 | 11 | 12 | class Tree: # pylint: disable=R0902 13 | """Renderer for the tree. 14 | 15 | Attributes: 16 | PIPE: The pipe character 17 | ELBOW: The elbow character 18 | TEE: The tee character 19 | PIPE_PREFIX: The pipe prefix 20 | SPACE_PREFIX: The space prefix 21 | """ 22 | 23 | PIPE = "│" 24 | ELBOW = "└──" 25 | TEE = "├──" 26 | PIPE_PREFIX = "│ " 27 | SPACE_PREFIX = " " 28 | 29 | def __init__( 30 | self, 31 | obj: JSONVal, 32 | term_features: TermFeatures, 33 | ) -> None: 34 | """Initialize the renderer. 35 | 36 | Args: 37 | obj: The object to render 38 | term_features: The terminal features 39 | """ 40 | self.obj = obj 41 | self._lines: list[str] = [] 42 | self.blue: list[ScalarVal] = [] 43 | self.bold: list[ScalarVal] = [] 44 | self.cyan: list[ScalarVal] = [] 45 | self.green: list[ScalarVal] = [] 46 | self.italic: list[ScalarVal] = [] 47 | self.magenta: list[ScalarVal] = [] 48 | self.red: list[ScalarVal] = [] 49 | self.reversed: list[ScalarVal] = [] 50 | self.underline: list[ScalarVal] = [] 51 | self.white: list[ScalarVal] = [] 52 | self.yellow: list[ScalarVal] = [] 53 | self.links: dict[str, str] = {} 54 | self.term_features = term_features 55 | 56 | def in_color(self, val: ScalarVal) -> str: 57 | """Colorize the string. 58 | 59 | Args: 60 | val: The thing to colorize 61 | 62 | Returns: 63 | The colorized string 64 | """ 65 | if not self.term_features.color: 66 | return str(val) 67 | 68 | ansis = ( 69 | "blue", 70 | "bold", 71 | "cyan", 72 | "green", 73 | "italic", 74 | "magenta", 75 | "red", 76 | "reversed", 77 | "underline", 78 | "white", 79 | "yellow", 80 | ) 81 | start = "" 82 | val_str = str(val) 83 | for ansi in ansis: 84 | matches = getattr(self, ansi) 85 | if val_str in [str(match) for match in matches]: 86 | start += getattr(Ansi, ansi.upper()) 87 | if val_str in self.links: 88 | val_str = term_link( 89 | uri=self.links[val_str], 90 | term_features=self.term_features, 91 | label=val_str, 92 | ) 93 | 94 | if start: 95 | return f"{start}{val_str}{Ansi.RESET}" 96 | 97 | return val_str 98 | 99 | @staticmethod 100 | def is_scalar(obj: JSONVal) -> bool: 101 | """Check if the object is a scalar. 
102 | 103 | Args: 104 | obj: The object to check 105 | Returns: 106 | Whether the object is a scalar 107 | """ 108 | return isinstance(obj, str | int | float | bool) or obj is None 109 | 110 | def _print_tree( # noqa: C901, PLR0912 # pylint: disable=too-many-positional-arguments 111 | self, 112 | obj: JSONVal, 113 | is_last: bool, # noqa: FBT001 114 | is_root: bool, # noqa: FBT001 115 | was_list: bool, # noqa: FBT001 116 | prefix: str = "", 117 | ) -> None: 118 | """Print the tree. 119 | 120 | Args: 121 | obj: The object to print 122 | is_last: Whether the object is the last in the list | dict 123 | is_root: Whether the object is the root of the tree 124 | was_list: Whether the object was a list 125 | prefix: The prefix to use 126 | 127 | Raises: 128 | TypeError: If the object is not a dict, list, or scalar 129 | """ 130 | if isinstance(obj, dict): 131 | for i, (key, value) in enumerate(obj.items()): 132 | is_last = i == len(obj) - 1 133 | key_repr = f"{Ansi.ITALIC}{key}{Ansi.RESET}" if was_list else key 134 | if is_root: 135 | decorator = "" 136 | elif is_last: 137 | decorator = self.ELBOW 138 | else: 139 | decorator = self.TEE 140 | self.append(f"{prefix}{decorator}{self.in_color(key_repr)}") 141 | 142 | if is_root: 143 | prefix_rev = prefix 144 | elif is_last: 145 | prefix_rev = prefix + self.SPACE_PREFIX 146 | else: 147 | prefix_rev = prefix + self.PIPE_PREFIX 148 | self._print_tree( 149 | obj=value, 150 | prefix=prefix_rev, 151 | is_last=self.is_scalar(value), 152 | is_root=False, 153 | was_list=False, 154 | ) 155 | 156 | elif isinstance(obj, list): 157 | is_complex = any(isinstance(item, dict | list) for item in obj) 158 | is_long = len(obj) > 1 159 | if is_complex and is_long: 160 | repr_obj = {str(i): item for i, item in enumerate(obj)} 161 | self._print_tree( 162 | obj=repr_obj, 163 | prefix=prefix, 164 | is_last=is_last, 165 | is_root=False, 166 | was_list=True, 167 | ) 168 | else: 169 | for i, item in enumerate(obj): 170 | is_last = i == len(obj) - 1 171 | self._print_tree( 172 | obj=item, 173 | prefix=prefix, 174 | is_last=is_last, 175 | is_root=False, 176 | was_list=False, 177 | ) 178 | 179 | elif self.is_scalar(obj): 180 | self.append( 181 | f"{prefix}{self.ELBOW if is_last else self.TEE}{self.in_color(obj)}", 182 | ) 183 | else: 184 | err = f"Invalid type {type(obj)}" 185 | raise TypeError(err) 186 | 187 | def append(self, string: str) -> None: 188 | """Append a line to the output. 189 | 190 | Args: 191 | string: The string to append 192 | """ 193 | self._lines.append(string) 194 | 195 | def render(self) -> str: 196 | """Render the root of the tree. 197 | 198 | Returns: 199 | The rendered tree 200 | """ 201 | # if not isinstance(self.obj, dict): 202 | self._print_tree(self.obj, is_last=False, is_root=True, was_list=False) 203 | return "\n".join(self._lines) + "\n" 204 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests.""" 2 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | """Global conftest.py for pytest. 2 | 3 | The root package import below happens before the pytest workers are forked, so it 4 | picked up by the initial coverage process for a source match. 5 | 6 | Without it, coverage reports the following false positive error: 7 | 8 | CoverageWarning: No data was collected. 
(no-data-collected) 9 | 10 | This works in conjunction with the coverage source_pkg set to the package such that 11 | a `coverage run --debug trace` shows the source package and file match. 12 | 13 | <...> 14 | Imported source package 'ansible_dev_environment' as '/**/src//__init__.py' 15 | <...> 16 | Tracing '/**/src//__init__.py' 17 | """ 18 | 19 | from __future__ import annotations 20 | 21 | import json 22 | import os 23 | import shutil 24 | import tarfile 25 | import tempfile 26 | import warnings 27 | 28 | from pathlib import Path 29 | from typing import TYPE_CHECKING 30 | from urllib.request import HTTPError, urlopen 31 | 32 | import pytest 33 | import yaml 34 | 35 | import ansible_dev_environment # noqa: F401 36 | 37 | from ansible_dev_environment.cli import Cli 38 | 39 | 40 | if TYPE_CHECKING: 41 | from collections.abc import Generator 42 | 43 | from ansible_dev_environment.config import Config 44 | 45 | 46 | GALAXY_CACHE = Path(__file__).parent.parent / ".cache" / ".galaxy_cache" 47 | REQS_FILE_NAME = "requirements.yml" 48 | 49 | 50 | @pytest.fixture 51 | def galaxy_cache() -> Path: 52 | """Return the galaxy cache directory. 53 | 54 | Returns: 55 | The galaxy cache directory. 56 | """ 57 | return GALAXY_CACHE 58 | 59 | 60 | def check_download_collection(name: str, dest: Path) -> None: 61 | """Download a collection if necessary. 62 | 63 | Args: 64 | name: The collection name. 65 | dest: The destination directory. 66 | """ 67 | namespace, name = name.split(".") 68 | base_url = "https://galaxy.ansible.com/api/v3/plugin/ansible/content/published/collections" 69 | 70 | url = f"{base_url}/index/{namespace}/{name}/" 71 | try: 72 | with urlopen(url) as response: # noqa: S310 73 | body = response.read() 74 | except HTTPError: 75 | err = f"Failed to check collection version: {name}" 76 | pytest.fail(err) 77 | 78 | json_response = json.loads(body) 79 | version = json_response["highest_version"]["version"] 80 | file_name = f"{namespace}-{name}-{version}.tar.gz" 81 | file_path = dest / file_name 82 | if file_path.exists(): 83 | return 84 | for found_file in dest.glob(f"{namespace}-{name}-*"): 85 | found_file.unlink() 86 | url = f"{base_url}/artifacts/{file_name}" 87 | warnings.warn(f"Downloading collection: {file_name}", stacklevel=0) 88 | try: 89 | with urlopen(url) as response, file_path.open(mode="wb") as file: # noqa: S310 90 | file.write(response.read()) 91 | except HTTPError: 92 | err = f"Failed to download collection: {name}" 93 | pytest.fail(err) 94 | 95 | 96 | def pytest_sessionstart(session: pytest.Session) -> None: 97 | """Start the server. 98 | 99 | Args: 100 | session: The pytest session. 101 | """ 102 | if session.config.option.collectonly: 103 | return 104 | 105 | if os.environ.get("PYTEST_XDIST_WORKER"): 106 | return 107 | 108 | if not GALAXY_CACHE.exists(): 109 | GALAXY_CACHE.mkdir(parents=True, exist_ok=True) 110 | 111 | for collection in ("ansible.utils", "ansible.scm", "ansible.posix"): 112 | check_download_collection(collection, GALAXY_CACHE) 113 | 114 | reqs: dict[str, list[dict[str, str]]] = {"collections": []} 115 | 116 | for found_file in GALAXY_CACHE.glob("*.tar.gz"): 117 | reqs["collections"].append({"name": str(found_file)}) 118 | 119 | requirements = GALAXY_CACHE / REQS_FILE_NAME 120 | requirements.write_text(yaml.dump(reqs)) 121 | 122 | 123 | @pytest.fixture(name="monkey_session", scope="session") 124 | def fixture_monkey_session() -> Generator[pytest.MonkeyPatch, None, None]: 125 | """Session scoped monkeypatch fixture. 
126 | 127 | Yields: 128 | pytest.MonkeyPatch: The monkeypatch fixture. 129 | """ 130 | monkey_patch = pytest.MonkeyPatch() 131 | yield monkey_patch 132 | monkey_patch.undo() 133 | 134 | 135 | @pytest.fixture(name="session_dir", scope="session") 136 | def fixture_session_dir() -> Generator[Path, None, None]: 137 | """A session scoped temporary directory. 138 | 139 | Yields: 140 | Path: Temporary directory. 141 | """ 142 | temp_dir = Path(tempfile.mkdtemp()) 143 | yield temp_dir 144 | shutil.rmtree(temp_dir) 145 | 146 | 147 | @pytest.fixture 148 | def installable_local_collection(tmp_path: Path) -> Path: 149 | """Provide a local collection that can be installed. 150 | 151 | Args: 152 | tmp_path: Temporary directory. 153 | 154 | Returns: 155 | The path to the local collection. 156 | """ 157 | src_dir = tmp_path / "ansible.posix" 158 | tar_file_path = next(GALAXY_CACHE.glob("ansible-posix*")) 159 | with tarfile.open(tar_file_path, "r") as tar: 160 | try: 161 | tar.extractall(src_dir, filter="data") 162 | except TypeError: 163 | tar.extractall(src_dir) # noqa: S202 164 | galaxy_contents = { 165 | "authors": "author", 166 | "name": "posix", 167 | "namespace": "ansible", 168 | "readme": "readme", 169 | "version": "1.0.0", 170 | } 171 | yaml.dump(galaxy_contents, (src_dir / "galaxy.yml").open("w")) 172 | return src_dir 173 | 174 | 175 | @pytest.fixture(scope="session") 176 | def session_venv(session_dir: Path, monkey_session: pytest.MonkeyPatch) -> Config: 177 | """Create a temporary venv for the session. 178 | 179 | Add some common collections to the venv. 180 | 181 | Since this is a session level fixture, care should be taken to not manipulate it 182 | or the resulting config in a way that would affect other tests. 183 | 184 | Args: 185 | session_dir: Temporary directory. 186 | monkey_session: Pytest monkeypatch fixture. 187 | 188 | Returns: 189 | The configuration object for the venv. 190 | """ 191 | venv_path = session_dir / "venv" 192 | monkey_session.setattr( 193 | "sys.argv", 194 | [ 195 | "ade", 196 | "install", 197 | "--no-seed", 198 | "-r", 199 | str(GALAXY_CACHE / REQS_FILE_NAME), 200 | "--venv", 201 | str(venv_path), 202 | "--ll", 203 | "debug", 204 | "--la", 205 | "true", 206 | "--lf", 207 | str(session_dir / "ade.log"), 208 | "-vvv", 209 | ], 210 | ) 211 | cli = Cli() 212 | cli.parse_args() 213 | cli.init_output() 214 | cli.args_sanity() 215 | cli.isolation_check() 216 | with pytest.raises(SystemExit): 217 | cli.run() 218 | return cli.config 219 | 220 | 221 | @pytest.fixture 222 | def function_venv(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> Config: 223 | """Create a temporary venv for the session. 224 | 225 | Add some common collections to the venv. 226 | 227 | Since this is a session level fixture, care should be taken to not manipulate it 228 | or the resulting config in a way that would affect other tests. 229 | 230 | Args: 231 | tmp_path: Temporary directory. 232 | monkeypatch: Pytest monkeypatch fixture. 233 | 234 | Returns: 235 | The configuration object for the venv. 
236 | """ 237 | venv_path = tmp_path / "venv" 238 | monkeypatch.setattr( 239 | "sys.argv", 240 | [ 241 | "ade", 242 | "install", 243 | "-r", 244 | str(GALAXY_CACHE / REQS_FILE_NAME), 245 | "--venv", 246 | str(venv_path), 247 | "--ll", 248 | "debug", 249 | "--la", 250 | "true", 251 | "--lf", 252 | str(tmp_path / "ade.log"), 253 | "-vvv", 254 | ], 255 | ) 256 | cli = Cli() 257 | cli.parse_args() 258 | cli.init_output() 259 | cli.args_sanity() 260 | cli.isolation_check() 261 | with pytest.raises(SystemExit): 262 | cli.run() 263 | return cli.config 264 | -------------------------------------------------------------------------------- /tests/fixtures/galaxy.yml: -------------------------------------------------------------------------------- 1 | name: cname 2 | namespace: cnamespace 3 | version: 0.0.1 4 | license: 5 | - MIT 6 | description: cdescription 7 | readme: README.md 8 | authors: 9 | - cauthor 10 | repository: crepository 11 | -------------------------------------------------------------------------------- /tests/fixtures/requirements.yml: -------------------------------------------------------------------------------- 1 | --- 2 | collections: 3 | - ansible.scm 4 | - name: ansible.netcommon 5 | -------------------------------------------------------------------------------- /tests/integration/__init__.py: -------------------------------------------------------------------------------- 1 | """Integration tests.""" 2 | -------------------------------------------------------------------------------- /tests/integration/test_basic.py: -------------------------------------------------------------------------------- 1 | """Basic smoke tests.""" 2 | 3 | from __future__ import annotations 4 | 5 | import json 6 | import re 7 | import sys 8 | 9 | from pathlib import Path 10 | 11 | import pytest 12 | 13 | from ansible_dev_environment.cli import main 14 | from ansible_dev_environment.output import Output 15 | from ansible_dev_environment.utils import TermFeatures, subprocess_run 16 | 17 | 18 | @pytest.mark.skipif( 19 | sys.version_info > (3, 12), 20 | reason="pylibssh issues 3.13, https://github.com/ansible/pylibssh/issues/699", 21 | ) 22 | def test_venv( 23 | capsys: pytest.CaptureFixture[str], 24 | tmp_path: Path, 25 | monkeypatch: pytest.MonkeyPatch, 26 | ) -> None: 27 | """Basic smoke test. 
28 | 29 | Test for a local collection install with optional dependencies 30 | 31 | Args: 32 | capsys: Capture stdout and stderr 33 | tmp_path: Temporary directory 34 | monkeypatch: Pytest monkeypatch 35 | """ 36 | # disable color for json output 37 | term_features = TermFeatures(color=False, links=False) 38 | output = Output( 39 | log_file=f"/{tmp_path}/ansible-dev-environment.log", 40 | log_level="INFO", 41 | log_append="false", 42 | term_features=term_features, 43 | verbosity=0, 44 | ) 45 | command = ( 46 | f"git clone https://github.com/ansible-collections/cisco.nxos.git {tmp_path / 'cisco.nxos'}" 47 | ) 48 | subprocess_run( 49 | command=command, 50 | verbose=True, 51 | msg="", 52 | output=output, 53 | ) 54 | monkeypatch.chdir(tmp_path) 55 | 56 | monkeypatch.setattr( 57 | "sys.argv", 58 | [ 59 | "ade", 60 | "install", 61 | str(tmp_path / "cisco.nxos[test]"), 62 | "--venv=venv", 63 | "--no-ansi", 64 | "-vvv", 65 | ], 66 | ) 67 | with pytest.raises(SystemExit): 68 | main() 69 | 70 | captured = capsys.readouterr() 71 | assert "Installed collections include: ansible.netcommon, ansible.utils," in captured.out 72 | assert "Optional dependencies found" in captured.out 73 | assert "'pytest-xdist # from collection user'" in captured.out 74 | 75 | monkeypatch.setattr( 76 | "sys.argv", 77 | ["ade", "list", "--venv=venv"], 78 | ) 79 | with pytest.raises(SystemExit): 80 | main() 81 | captured = capsys.readouterr() 82 | assert "cisco.nxos" in captured.out 83 | assert "ansible.netcommon" in captured.out 84 | assert "ansible.utils" in captured.out 85 | assert "unknown" not in captured.out 86 | 87 | monkeypatch.setattr( 88 | "sys.argv", 89 | [ 90 | "ade", 91 | "uninstall", 92 | "ansible.utils", 93 | "--venv=venv", 94 | ], 95 | ) 96 | with pytest.raises(SystemExit): 97 | main() 98 | captured = capsys.readouterr() 99 | assert "Removed ansible.utils" in captured.out 100 | 101 | monkeypatch.setattr( 102 | "sys.argv", 103 | ["ade", "inspect", "--venv=venv", "--no-ansi"], 104 | ) 105 | with pytest.raises(SystemExit): 106 | main() 107 | captured = capsys.readouterr() 108 | captured_json = json.loads(captured.out) 109 | assert "cisco.nxos" in captured_json 110 | assert "ansible.netcommon" in captured_json 111 | assert "ansible.utils" not in captured_json 112 | 113 | monkeypatch.setattr( 114 | "sys.argv", 115 | ["ade", "check", "--venv=venv"], 116 | ) 117 | with pytest.raises(SystemExit): 118 | main() 119 | captured = capsys.readouterr() 120 | assert "Collection ansible.netcommon requires ansible.util" in captured.err 121 | 122 | 123 | def test_non_local( 124 | capsys: pytest.CaptureFixture[str], 125 | tmp_path: Path, 126 | monkeypatch: pytest.MonkeyPatch, 127 | ) -> None: 128 | """Install non-local collection. 
129 |
130 |     Args:
131 |         capsys: Capture stdout and stderr
132 |         tmp_path: Temporary directory
133 |         monkeypatch: Pytest monkeypatch
134 |     """
135 |     monkeypatch.setattr(
136 |         "sys.argv",
137 |         [
138 |             "ade",
139 |             "install",
140 |             "ansible.scm",
141 |             f"--venv={tmp_path / 'venv'}",
142 |         ],
143 |     )
144 |     with pytest.raises(SystemExit):
145 |         main()
146 |     string = "Installed collections include: ansible.scm and ansible.utils"
147 |     captured = capsys.readouterr()
148 |     assert string in captured.out
149 |     monkeypatch.setattr(
150 |         "sys.argv",
151 |         ["ade", "tree", f"--venv={tmp_path / 'venv'}", "-v"],
152 |     )
153 |     with pytest.raises(SystemExit):
154 |         main()
155 |     captured = capsys.readouterr()
156 |     assert "ansible.scm\n├──ansible.utils" in captured.out
157 |     assert "├──jsonschema" in captured.out
158 |
159 |
160 | @pytest.mark.skipif(
161 |     sys.version_info > (3, 12),
162 |     reason="pylibssh issues 3.13, https://github.com/ansible/pylibssh/issues/699",
163 | )
164 | def test_requirements(
165 |     capsys: pytest.CaptureFixture[str],
166 |     tmp_path: Path,
167 |     monkeypatch: pytest.MonkeyPatch,
168 | ) -> None:
169 |     """Install and uninstall collections from a requirements file.
170 |
171 |     Args:
172 |         capsys: Capture stdout and stderr
173 |         tmp_path: Temporary directory
174 |         monkeypatch: Pytest monkeypatch
175 |
176 |     """
177 |     requirements = Path(__file__).parent.parent / "fixtures" / "requirements.yml"
178 |     monkeypatch.setattr(
179 |         "sys.argv",
180 |         [
181 |             "ade",
182 |             "install",
183 |             f"--venv={tmp_path / 'venv'}",
184 |             "-r",
185 |             str(requirements),
186 |         ],
187 |     )
188 |     with pytest.raises(SystemExit):
189 |         main()
190 |     string = "Installed collections include: ansible.netcommon, ansible.scm,"
191 |     captured = capsys.readouterr()
192 |     assert string in captured.out
193 |     monkeypatch.setattr(
194 |         "sys.argv",
195 |         [
196 |             "ade",
197 |             "uninstall",
198 |             f"--venv={tmp_path / 'venv'}",
199 |             "-r",
200 |             str(requirements),
201 |         ],
202 |     )
203 |     with pytest.raises(SystemExit):
204 |         main()
205 |     captured = capsys.readouterr()
206 |     string = "Removed ansible.netcommon"
207 |     assert string in captured.out
208 |     string = "Removed ansible.scm"
209 |     assert string in captured.out
210 |
211 |     monkeypatch.setattr(
212 |         "sys.argv",
213 |         ["ade", "list", f"--venv={tmp_path / 'venv'}"],
214 |     )
215 |     with pytest.raises(SystemExit):
216 |         main()
217 |     captured = capsys.readouterr()
218 |     assert "ansible.netcommon" not in captured.out
219 |     assert "ansible.scm" not in captured.out
220 |     assert "ansible.utils" in captured.out
221 |
222 |
223 | def test_system_site_packages(
224 |     capsys: pytest.CaptureFixture[str],
225 |     tmp_path: Path,
226 |     monkeypatch: pytest.MonkeyPatch,
227 | ) -> None:
228 |     """Install a collection with system site packages enabled.
229 |
230 |     Args:
231 |         capsys: Capture stdout and stderr
232 |         tmp_path: Temporary directory
233 |         monkeypatch: Pytest monkeypatch
234 |     """
235 |     monkeypatch.setattr(
236 |         "sys.argv",
237 |         [
238 |             "ade",
239 |             "install",
240 |             "ansible.utils",
241 |             f"--venv={tmp_path / 'venv'}",
242 |             "--system-site-packages",
243 |             "--no-ansi",
244 |             "-vvv",
245 |         ],
246 |     )
247 |     with pytest.raises(SystemExit):
248 |         main()
249 |     captured = capsys.readouterr()
250 |     assert "with system site packages" in captured.out
251 |     assert "Installed collections include: ansible.utils" in captured.out
252 |
253 |
254 | def test_specified_core_version_pass(
255 |     tmp_path: Path,
256 |     monkeypatch: pytest.MonkeyPatch,
257 | ) -> None:
258 |     """Install a user-specified core version.
259 | 260 | Args: 261 | tmp_path: Temporary directory 262 | monkeypatch: Pytest monkeypatch 263 | """ 264 | term_features = TermFeatures(color=False, links=False) 265 | output = Output( 266 | log_file=f"/{tmp_path}/ansible-dev-environment.log", 267 | log_level="INFO", 268 | log_append="false", 269 | term_features=term_features, 270 | verbosity=0, 271 | ) 272 | command = "pip index versions ansible-core" 273 | result = subprocess_run(command=command, verbose=True, msg="", output=output) 274 | 275 | version_pattern = re.compile(r"\d+\.\d+\.\d+") 276 | versions = version_pattern.findall(result.stdout) 277 | second_latest = versions[2] 278 | 279 | venv_path = tmp_path / ".venv" 280 | 281 | monkeypatch.setattr( 282 | "sys.argv", 283 | [ 284 | "ade", 285 | "install", 286 | f"--venv={venv_path}", 287 | f"--ansible-core-version={second_latest}", 288 | ], 289 | ) 290 | with pytest.raises(SystemExit): 291 | main() 292 | command = f"{venv_path}/bin/ansible --version" 293 | result = subprocess_run(command=command, verbose=True, msg="", output=output) 294 | assert second_latest in result.stdout 295 | 296 | 297 | def test_specified_dev_tools_version_pass( 298 | tmp_path: Path, 299 | monkeypatch: pytest.MonkeyPatch, 300 | ) -> None: 301 | """Install a user-specified ansible-dev-tools version. 302 | 303 | Args: 304 | tmp_path: Temporary directory 305 | monkeypatch: Pytest monkeypatch 306 | """ 307 | term_features = TermFeatures(color=False, links=False) 308 | output = Output( 309 | log_file=f"/{tmp_path}/ansible-dev-environment.log", 310 | log_level="INFO", 311 | log_append="false", 312 | term_features=term_features, 313 | verbosity=0, 314 | ) 315 | command = "pip index versions ansible-dev-tools" 316 | result = subprocess_run(command=command, verbose=True, msg="", output=output) 317 | 318 | version_pattern = re.compile(r"\d+\.\d+\.\d+") 319 | versions = version_pattern.findall(result.stdout) 320 | second_latest = versions[2] 321 | 322 | venv_path = tmp_path / ".venv" 323 | 324 | monkeypatch.setattr( 325 | "sys.argv", 326 | [ 327 | "ade", 328 | "install", 329 | f"--venv={venv_path}", 330 | "--seed", 331 | f"--ansible-dev-tools-version={second_latest}", 332 | ], 333 | ) 334 | with pytest.raises(SystemExit): 335 | main() 336 | command = f"{venv_path}/bin/pip list | grep ansible-dev-tools" 337 | result = subprocess_run(command=command, verbose=True, msg="", output=output) 338 | assert second_latest in result.stdout 339 | -------------------------------------------------------------------------------- /tests/integration/test_user_python.py: -------------------------------------------------------------------------------- 1 | """Test multiple variants of python version.""" 2 | 3 | from __future__ import annotations 4 | 5 | import sys 6 | 7 | from pathlib import Path 8 | 9 | import pytest 10 | 11 | from ansible_dev_environment.cli import main 12 | 13 | 14 | def generate_pythons_uv() -> list[str]: 15 | """Generate a list of python versions. 
16 | 17 | Returns: 18 | List of python versions 19 | """ 20 | pythons = ["python3"] 21 | version = sys.version.split(" ", maxsplit=1)[0] 22 | pythons.append(version) 23 | pythons.append(f"python{version}") 24 | major_minor = version.rsplit(".", 1)[0] 25 | pythons.append(major_minor) 26 | major, minor = major_minor.split(".") 27 | one_less = f"{major}.{int(minor) - 1}" 28 | pythons.append(one_less) 29 | sys_path = str(Path("/usr/bin/python3").resolve()) 30 | pythons.append(sys_path) 31 | return pythons 32 | 33 | 34 | @pytest.mark.parametrize("python", generate_pythons_uv()) 35 | def test_specified_python_version_uv( 36 | python: str, 37 | capsys: pytest.CaptureFixture[str], 38 | tmp_path: Path, 39 | monkeypatch: pytest.MonkeyPatch, 40 | ) -> None: 41 | """Build the venv with a user specified python version. 42 | 43 | Args: 44 | python: Python version 45 | capsys: Capture stdout and stderr 46 | tmp_path: Temporary directory 47 | monkeypatch: Pytest monkeypatch 48 | """ 49 | venv_path = tmp_path / ".venv" 50 | monkeypatch.setattr( 51 | "sys.argv", 52 | [ 53 | "ade", 54 | "install", 55 | f"--venv={venv_path}", 56 | f"--python={python}", 57 | ], 58 | ) 59 | with pytest.raises(SystemExit): 60 | main() 61 | 62 | captured = capsys.readouterr() 63 | venv_line = [ 64 | line for line in captured.out.splitlines() if "Created virtual environment:" in line 65 | ] 66 | assert venv_line[0].endswith(python) 67 | 68 | 69 | def generate_pythons_pip() -> list[str]: 70 | """Generate a list of python versions. 71 | 72 | Returns: 73 | List of python versions 74 | """ 75 | pythons = ["python3"] 76 | version = sys.version.split(" ", maxsplit=1)[0] 77 | major_minor = version.rsplit(".", 1)[0] 78 | pythons.append(major_minor) 79 | sys_path = str(Path("/usr/bin/python3").resolve()) 80 | pythons.append(sys_path) 81 | return pythons 82 | 83 | 84 | @pytest.mark.parametrize("python", generate_pythons_pip()) 85 | def test_specified_python_version_pip( 86 | python: str, 87 | capsys: pytest.CaptureFixture[str], 88 | tmp_path: Path, 89 | monkeypatch: pytest.MonkeyPatch, 90 | ) -> None: 91 | """Build the venv with a user specified python version. 92 | 93 | Args: 94 | python: Python version 95 | capsys: Capture stdout and stderr 96 | tmp_path: Temporary directory 97 | monkeypatch: Pytest monkeypatch 98 | """ 99 | venv_path = tmp_path / ".venv" 100 | monkeypatch.setattr( 101 | "sys.argv", 102 | ["ade", "install", f"--venv={venv_path}", f"--python={python}", "--no-uv"], 103 | ) 104 | with pytest.raises(SystemExit): 105 | main() 106 | 107 | captured = capsys.readouterr() 108 | venv_line = [ 109 | line for line in captured.out.splitlines() if "Created virtual environment:" in line 110 | ] 111 | assert venv_line[0].endswith(python) 112 | -------------------------------------------------------------------------------- /tests/test_argparser.py: -------------------------------------------------------------------------------- 1 | """Tests for the arg_parser module.""" 2 | 3 | from __future__ import annotations 4 | 5 | import pytest 6 | 7 | from ansible_dev_environment.arg_parser import ( 8 | ArgumentParser, 9 | CustomHelpFormatter, 10 | _group_titles, 11 | ) 12 | 13 | 14 | def test_no_option_string( 15 | capsys: pytest.CaptureFixture[str], 16 | ) -> None: 17 | """Test an argument without an option string. 18 | 19 | Args: 20 | capsys: Pytest fixture. 
21 | """ 22 | parser = ArgumentParser( 23 | formatter_class=CustomHelpFormatter, 24 | ) 25 | parser.add_argument( 26 | dest="test", 27 | action="store_true", 28 | help="Test this", 29 | ) 30 | parser.print_help() 31 | captured = capsys.readouterr() 32 | assert "Test this" in captured.out 33 | 34 | 35 | def test_one_string( 36 | capsys: pytest.CaptureFixture[str], 37 | ) -> None: 38 | """Test an argument without an option string. 39 | 40 | Args: 41 | capsys: Pytest fixture. 42 | """ 43 | parser = ArgumentParser( 44 | formatter_class=CustomHelpFormatter, 45 | ) 46 | parser.add_argument( 47 | "-t", 48 | dest="test", 49 | action="store_true", 50 | help="Test this", 51 | ) 52 | parser.print_help() 53 | captured = capsys.readouterr() 54 | assert "-t Test this" in captured.out 55 | 56 | 57 | def test_too_many_string( 58 | monkeypatch: pytest.MonkeyPatch, 59 | ) -> None: 60 | """Test an argument with too many option strings. 61 | 62 | Args: 63 | monkeypatch: Pytest fixture. 64 | """ 65 | monkeypatch.setattr("sys.argv", ["prog", "--help"]) 66 | 67 | parser = ArgumentParser( 68 | formatter_class=CustomHelpFormatter, 69 | ) 70 | parser.add_argument( 71 | "-t", 72 | "-test", 73 | "--test", 74 | action="store_true", 75 | help="Test this", 76 | ) 77 | with pytest.raises(ValueError, match="Too many option strings"): 78 | parser.parse_args() 79 | 80 | 81 | def test_group_no_title(capsys: pytest.CaptureFixture[str]) -> None: 82 | """Test a group without a title. 83 | 84 | Args: 85 | capsys: Pytest fixture. 86 | """ 87 | parser = ArgumentParser( 88 | formatter_class=CustomHelpFormatter, 89 | ) 90 | parser.add_argument_group() 91 | _group_titles(parser) 92 | parser.print_help() 93 | captured = capsys.readouterr() 94 | assert "--help" in captured.out 95 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | """Unit tests.""" 2 | -------------------------------------------------------------------------------- /tests/unit/conftest.py: -------------------------------------------------------------------------------- 1 | """Fixtures for unit tests.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TYPE_CHECKING 6 | 7 | import pytest 8 | 9 | from ansible_dev_environment.output import Output 10 | from ansible_dev_environment.utils import TermFeatures 11 | 12 | 13 | if TYPE_CHECKING: 14 | from pathlib import Path 15 | 16 | 17 | @pytest.fixture 18 | def output(tmp_path: Path) -> Output: 19 | """Create an Output class object as fixture. 20 | 21 | Args: 22 | tmp_path: Temporary directory. 23 | 24 | Returns: 25 | Output: Output class object. 26 | """ 27 | return Output( 28 | log_file=str(tmp_path) + "ansible-creator.log", 29 | log_level="notset", 30 | log_append="false", 31 | term_features=TermFeatures(color=False, links=False), 32 | verbosity=3, 33 | ) 34 | 35 | 36 | @pytest.fixture(name="_wide_console") 37 | def _wide_console(monkeypatch: pytest.MonkeyPatch) -> None: 38 | """Fixture to set the terminal width to 1000 to prevent wrapping. 39 | 40 | Args: 41 | monkeypatch: Pytest fixture. 42 | """ 43 | 44 | def _console_width() -> int: 45 | """Return a large console width. 46 | 47 | Returns: 48 | int: Console width. 
49 | """ 50 | return 1000 51 | 52 | monkeypatch.setattr( 53 | "ansible_dev_environment.output.console_width", 54 | _console_width, 55 | ) 56 | -------------------------------------------------------------------------------- /tests/unit/test_cli.py: -------------------------------------------------------------------------------- 1 | """Test cli functionality.""" 2 | 3 | from __future__ import annotations 4 | 5 | from pathlib import Path 6 | from typing import TYPE_CHECKING 7 | 8 | import argcomplete 9 | import pytest 10 | 11 | from ansible_dev_environment.arg_parser import ArgumentParser, apply_envvars, parse 12 | from ansible_dev_environment.cli import Cli, main 13 | 14 | 15 | if TYPE_CHECKING: 16 | from collections.abc import Generator 17 | 18 | 19 | def test_cpi(monkeypatch: pytest.MonkeyPatch) -> None: 20 | """Test the cpi option. 21 | 22 | Args: 23 | monkeypatch: Pytest fixture. 24 | """ 25 | monkeypatch.setattr("sys.argv", ["ansible-dev-environment", "install", "--cpi"]) 26 | cli = Cli() 27 | cli.parse_args() 28 | assert cli.args.requirement.parts[-3:] == ( 29 | "ansible-dev-environment", 30 | ".config", 31 | "source-requirements.yml", 32 | ) 33 | 34 | 35 | @pytest.mark.filterwarnings("ignore") 36 | def test_tty(monkeypatch: pytest.MonkeyPatch) -> None: 37 | """Test term features with tty. 38 | 39 | Args: 40 | monkeypatch: Pytest fixture. 41 | """ 42 | monkeypatch.setattr("sys.stdout.isatty", (lambda: True)) 43 | monkeypatch.setattr("os.environ", {"NO_COLOR": "anything"}) 44 | monkeypatch.setattr("sys.argv", ["ansible-dev-environment", "install"]) 45 | cli = Cli() 46 | cli.parse_args() 47 | cli.init_output() 48 | assert not cli.output.term_features.color 49 | assert not cli.output.term_features.links 50 | 51 | 52 | @pytest.mark.usefixtures("_wide_console") 53 | def test_missing_requirements( 54 | capsys: pytest.CaptureFixture[str], 55 | monkeypatch: pytest.MonkeyPatch, 56 | tmp_path: Path, 57 | ) -> None: 58 | """Test the missing requirements file. 59 | 60 | Args: 61 | capsys: Pytest stdout capture fixture. 62 | monkeypatch: Pytest fixture. 63 | tmp_path: Pytest fixture. 64 | """ 65 | requirements_file = tmp_path / "requirements.yml" 66 | monkeypatch.setattr( 67 | "sys.argv", 68 | ["ansible-dev-environment", "install", "-r", str(requirements_file)], 69 | ) 70 | match = f"Requirements file not found: {requirements_file}" 71 | with pytest.raises(SystemExit): 72 | main(dry=True) 73 | captured = capsys.readouterr() 74 | assert match in captured.err 75 | 76 | 77 | def test_editable_many( 78 | capsys: pytest.CaptureFixture[str], 79 | monkeypatch: pytest.MonkeyPatch, 80 | ) -> None: 81 | """Test the editable option with too many arguments. 82 | 83 | Args: 84 | capsys: Pytest stdout capture fixture. 85 | monkeypatch: Pytest fixture. 86 | """ 87 | monkeypatch.setattr( 88 | "sys.argv", 89 | ["ansible-dev-environment", "install", "--venv", "venv", "-e", "one", "two"], 90 | ) 91 | with pytest.raises(SystemExit): 92 | main(dry=True) 93 | captured = capsys.readouterr() 94 | assert "Editable can only be used with a single collection specifier." in captured.err 95 | 96 | 97 | def test_editable_requirements( 98 | capsys: pytest.CaptureFixture[str], 99 | monkeypatch: pytest.MonkeyPatch, 100 | tmp_path: Path, 101 | ) -> None: 102 | """Test the editable option with requirements file. 103 | 104 | Args: 105 | capsys: Pytest stdout capture fixture. 106 | monkeypatch: Pytest fixture. 107 | tmp_path: Pytest fixture. 
108 | """ 109 | requirements_file = tmp_path / "requirements.yml" 110 | requirements_file.touch() 111 | monkeypatch.setattr( 112 | "sys.argv", 113 | ["ansible-dev-environment", "install", "-r", str(requirements_file), "-e"], 114 | ) 115 | with pytest.raises(SystemExit): 116 | main(dry=True) 117 | captured = capsys.readouterr() 118 | assert "Editable can not be used with a requirements file." in captured.err 119 | 120 | 121 | @pytest.mark.parametrize( 122 | "env_var", 123 | ( 124 | "ANSIBLE_COLLECTIONS_PATHS", 125 | "ANSIBLE_COLLECTION_PATH", 126 | ), 127 | ) 128 | def test_acp_env_var_set( 129 | env_var: str, 130 | capsys: pytest.CaptureFixture[str], 131 | monkeypatch: pytest.MonkeyPatch, 132 | ) -> None: 133 | """Test the ansible collection path environment variable set. 134 | 135 | Args: 136 | env_var: Environment variable name. 137 | capsys: Pytest stdout capture fixture. 138 | monkeypatch: Pytest fixture. 139 | """ 140 | monkeypatch.setenv(env_var, "test") 141 | monkeypatch.setattr("sys.argv", ["ansible-dev-environment", "install"]) 142 | with pytest.raises(SystemExit): 143 | main(dry=True) 144 | captured = capsys.readouterr() 145 | assert f"{env_var} is set" in captured.err 146 | 147 | 148 | @pytest.mark.usefixtures("_wide_console") 149 | def test_collections_in_home( 150 | capsys: pytest.CaptureFixture[str], 151 | monkeypatch: pytest.MonkeyPatch, 152 | tmp_path: Path, 153 | ) -> None: 154 | """Test the collections in home directory. 155 | 156 | Args: 157 | capsys: Pytest stdout capture fixture. 158 | monkeypatch: Pytest fixture. 159 | tmp_path: Pytest fixture. 160 | """ 161 | monkeypatch.setattr( 162 | "sys.argv", 163 | ["ansible-dev-environment", "install", "--venv", "venv"], 164 | ) 165 | monkeypatch.setenv("HOME", str(tmp_path)) 166 | monkeypatch.setenv("ANSIBLE_HOME", str(tmp_path / ".ansible")) 167 | collection_root = tmp_path / ".ansible" / "collections" / "ansible_collections" 168 | (collection_root / "ansible" / "utils").mkdir(parents=True) 169 | with pytest.raises(SystemExit): 170 | main(dry=True) 171 | captured = capsys.readouterr() 172 | msg = f"Collections found in {collection_root}" 173 | assert msg in captured.err 174 | 175 | 176 | def test_collections_in_user( 177 | capsys: pytest.CaptureFixture[str], 178 | monkeypatch: pytest.MonkeyPatch, 179 | ) -> None: 180 | """Test the collections in user directory. 181 | 182 | Args: 183 | capsys: Pytest stdout capture fixture. 184 | monkeypatch: Pytest fixture. 185 | """ 186 | usr_path = Path("/usr/share/ansible/collections") 187 | exists = Path.exists 188 | 189 | def _exists(path: Path) -> bool: 190 | """Patch the exists method. 191 | 192 | Args: 193 | path: Path object. 194 | 195 | Returns: 196 | bool: True if the path exists. 197 | """ 198 | if path == usr_path: 199 | return True 200 | return exists(path) 201 | 202 | monkeypatch.setattr(Path, "exists", _exists) 203 | 204 | iterdir = Path.iterdir 205 | 206 | def _iterdir(path: Path) -> list[Path] | Generator[Path, None, None]: 207 | """Patch the iterdir method. 208 | 209 | Args: 210 | path: Path object. 211 | 212 | Returns: 213 | List of paths or generator. 
214 | """ 215 | if path == usr_path: 216 | return [usr_path / "ansible_collections"] 217 | return iterdir(path) 218 | 219 | monkeypatch.setattr(Path, "iterdir", _iterdir) 220 | 221 | monkeypatch.setattr( 222 | "sys.argv", 223 | ["ansible-dev-environment", "install", "--venv", "venv"], 224 | ) 225 | with pytest.raises(SystemExit): 226 | main(dry=True) 227 | captured = capsys.readouterr() 228 | msg = f"Collections found in {usr_path}" 229 | assert msg in captured.err 230 | 231 | 232 | def test_no_venv_specified( 233 | capsys: pytest.CaptureFixture[str], 234 | monkeypatch: pytest.MonkeyPatch, 235 | ) -> None: 236 | """Test no virtual environment specified. 237 | 238 | Args: 239 | capsys: Pytest stdout capture fixture. 240 | monkeypatch: Pytest fixture. 241 | """ 242 | monkeypatch.setattr( 243 | "sys.argv", 244 | ["ansible-dev-environment", "install", "-vvv"], 245 | ) 246 | monkeypatch.delenv("VIRTUAL_ENV", raising=False) 247 | main(dry=True) 248 | captured = capsys.readouterr() 249 | 250 | found = [line for line in captured.out.splitlines() if "Debug: venv: " in line] 251 | assert len(found) == 1 252 | assert found[0].endswith(".venv") 253 | 254 | 255 | def test_exit_code_one( 256 | capsys: pytest.CaptureFixture[str], 257 | monkeypatch: pytest.MonkeyPatch, 258 | ) -> None: 259 | """Test exit code one. 260 | 261 | Args: 262 | capsys: Pytest stdout capture fixture. 263 | monkeypatch: Pytest fixture. 264 | """ 265 | monkeypatch.setattr( 266 | "sys.argv", 267 | ["ansible-dev-environment", "install"], 268 | ) 269 | cli = Cli() 270 | cli.parse_args() 271 | cli.init_output() 272 | cli.output.error("Test error") 273 | with pytest.raises(SystemExit) as excinfo: 274 | cli.exit() 275 | expected = 1 276 | assert excinfo.value.code == expected 277 | captured = capsys.readouterr() 278 | assert "Test error" in captured.err 279 | 280 | 281 | def test_exit_code_two( 282 | capsys: pytest.CaptureFixture[str], 283 | monkeypatch: pytest.MonkeyPatch, 284 | ) -> None: 285 | """Test exit code two. 286 | 287 | Args: 288 | capsys: Pytest stdout capture fixture. 289 | monkeypatch: Pytest fixture. 290 | """ 291 | monkeypatch.setattr( 292 | "sys.argv", 293 | ["ansible-dev-environment", "install"], 294 | ) 295 | cli = Cli() 296 | cli.parse_args() 297 | cli.init_output() 298 | cli.output.warning("Test warning") 299 | with pytest.raises(SystemExit) as excinfo: 300 | cli.exit() 301 | expected = 2 302 | assert excinfo.value.code == expected 303 | captured = capsys.readouterr() 304 | assert "Test warning" in captured.out 305 | 306 | 307 | def test_envvar_mapping_error(monkeypatch: pytest.MonkeyPatch) -> None: 308 | """Test environment mapping error. 309 | 310 | Args: 311 | monkeypatch: Pytest fixture. 312 | """ 313 | monkeypatch.setattr( 314 | "ansible_dev_environment.arg_parser.ENVVAR_MAPPING", 315 | {"foo": "FOO"}, 316 | ) 317 | monkeypatch.setattr( 318 | "sys.argv", 319 | ["ansible-dev-environment", "install"], 320 | ) 321 | cli = Cli() 322 | with pytest.raises(NotImplementedError): 323 | cli.parse_args() 324 | 325 | 326 | def test_apply_envvar_error(monkeypatch: pytest.MonkeyPatch) -> None: 327 | """Test environment mapping error. 328 | 329 | Args: 330 | monkeypatch: Pytest fixture. 
331 | """ 332 | monkeypatch.setattr( 333 | "ansible_dev_environment.arg_parser.ENVVAR_MAPPING", 334 | {"foo": "FOO"}, 335 | ) 336 | monkeypatch.setenv("FOO", "42.0") 337 | 338 | parser = ArgumentParser() 339 | parser.add_argument("--foo", type=float, help="helpless") 340 | 341 | with pytest.raises(NotImplementedError) as excinfo: 342 | apply_envvars(args=[], parser=parser) 343 | 344 | assert "not implemented for envvar FOO" in str(excinfo.value) 345 | 346 | 347 | def test_env_wrong_type( 348 | monkeypatch: pytest.MonkeyPatch, 349 | capsys: pytest.CaptureFixture[str], 350 | ) -> None: 351 | """Test wrong type. 352 | 353 | Args: 354 | monkeypatch: Pytest fixture. 355 | capsys: Pytest stdout capture fixture. 356 | """ 357 | monkeypatch.setattr( 358 | "sys.argv", 359 | ["ansible-dev-environment", "install"], 360 | ) 361 | monkeypatch.setenv("ADE_VERBOSE", "not_an_int") 362 | cli = Cli() 363 | with pytest.raises(SystemExit): 364 | cli.parse_args() 365 | captured = capsys.readouterr() 366 | assert "could not convert to int" in captured.err 367 | 368 | 369 | def test_env_wrong_choice( 370 | monkeypatch: pytest.MonkeyPatch, 371 | capsys: pytest.CaptureFixture[str], 372 | ) -> None: 373 | """Test wrong choice. 374 | 375 | Args: 376 | monkeypatch: Pytest fixture. 377 | capsys: Pytest stdout capture fixture. 378 | """ 379 | monkeypatch.setattr( 380 | "sys.argv", 381 | ["ansible-dev-environment", "install"], 382 | ) 383 | monkeypatch.setenv("ADE_ISOLATION_MODE", "wrong_choice") 384 | cli = Cli() 385 | with pytest.raises(SystemExit): 386 | cli.parse_args() 387 | captured = capsys.readouterr() 388 | assert "choose from 'restrictive', 'cfg', 'none'" in captured.err 389 | 390 | 391 | def test_arg_complete(monkeypatch: pytest.MonkeyPatch) -> None: 392 | """Test argument completion. 393 | 394 | Args: 395 | monkeypatch: Pytest fixture. 396 | """ 397 | inited_parser = None 398 | orig_apply_envvars = apply_envvars 399 | 400 | def _apply_envvars( 401 | args: list[str], 402 | parser: ArgumentParser, 403 | ) -> None: 404 | """Apply environment variables to the argument parser. 405 | 406 | Args: 407 | args: List of arguments. 408 | parser: Argument parser. 
409 | """ 410 | nonlocal inited_parser 411 | inited_parser = parser 412 | orig_apply_envvars(args, parser) 413 | 414 | monkeypatch.setattr( 415 | "ansible_dev_environment.arg_parser.apply_envvars", 416 | _apply_envvars, 417 | ) 418 | 419 | monkeypatch.setattr( 420 | "sys.argv", 421 | ["ade", "install"], 422 | ) 423 | parse() 424 | 425 | cli = "ade ins" 426 | monkeypatch.setenv("_ARGCOMPLETE", "1") 427 | monkeypatch.setenv("_ARGCOMPLETE_IFS", "\013") 428 | monkeypatch.setenv("COMP_LINE", cli) 429 | monkeypatch.setenv("COMP_POINT", str(len(cli))) 430 | import io 431 | 432 | str_io = io.StringIO() 433 | 434 | argcomplete.autocomplete(inited_parser, exit_method=print, output_stream=str_io) # type: ignore[arg-type] 435 | 436 | output = str_io.getvalue() 437 | assert "inspect" in output 438 | assert "install" in output 439 | -------------------------------------------------------------------------------- /tests/unit/test_cli_deprecated.py: -------------------------------------------------------------------------------- 1 | """Test some deprecated values in the CLI.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TYPE_CHECKING 6 | 7 | from ansible_dev_environment.cli import main 8 | 9 | 10 | if TYPE_CHECKING: 11 | import pytest 12 | 13 | 14 | def test_adt(capsys: pytest.CaptureFixture[str], monkeypatch: pytest.MonkeyPatch) -> None: 15 | """Test the seed option. 16 | 17 | Args: 18 | capsys: Pytest stdout capture fixture. 19 | monkeypatch: Pytest fixture. 20 | """ 21 | # Test the seed option 22 | monkeypatch.setattr( 23 | "sys.argv", 24 | ["ansible-dev-environment", "install", "--adt"], 25 | ) 26 | main(dry=True) 27 | captured = capsys.readouterr() 28 | assert "'--adt' is deprecated" in captured.out 29 | 30 | 31 | def test_skip_uv(capsys: pytest.CaptureFixture[str], monkeypatch: pytest.MonkeyPatch) -> None: 32 | """Test the skip uv option. 33 | 34 | Args: 35 | capsys: Pytest stdout capture fixture. 36 | monkeypatch: Pytest fixture. 37 | """ 38 | # Test the skip uv option 39 | monkeypatch.setattr( 40 | "sys.argv", 41 | ["ansible-dev-environment", "install"], 42 | ) 43 | monkeypatch.setenv("SKIP_UV", "1") 44 | main(dry=True) 45 | captured = capsys.readouterr() 46 | assert "'SKIP_UV' is deprecated" in captured.out 47 | -------------------------------------------------------------------------------- /tests/unit/test_cli_isolation.py: -------------------------------------------------------------------------------- 1 | """Tests specific to isolation.""" 2 | 3 | from __future__ import annotations 4 | 5 | from argparse import Namespace 6 | from pathlib import Path 7 | from typing import TYPE_CHECKING 8 | 9 | import pytest 10 | 11 | from ansible_dev_environment.cli import Cli 12 | from ansible_dev_environment.definitions import COLLECTIONS_PATH as CP 13 | from ansible_dev_environment.definitions import AnsibleCfg 14 | 15 | 16 | if TYPE_CHECKING: 17 | from pathlib import Path 18 | 19 | 20 | @pytest.fixture(name="cli") 21 | def init_cli(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> Cli: 22 | """Fixture to mock the CLI for testing. 23 | 24 | Args: 25 | tmp_path: Pytest fixture. 26 | monkeypatch: Pytest fixture for monkey patching. 27 | 28 | Returns: 29 | Cli: Mocked CLI instance. 
30 | """ 31 | cwd = tmp_path / "cwd" 32 | home = tmp_path / "home" 33 | system = tmp_path / "system" 34 | cwd.mkdir() 35 | home.mkdir() 36 | system.mkdir() 37 | 38 | monkeypatch.chdir(cwd) 39 | monkeypatch.delenv("ANSIBLE_CONFIG", raising=False) 40 | 41 | args = Namespace( 42 | isolation_mode="cfg", 43 | log_append=False, 44 | log_file="/dev/null", 45 | log_level="warning", 46 | verbose=0, 47 | ) 48 | 49 | cli = Cli() 50 | cli.args = args 51 | cli.acfg_cwd = AnsibleCfg(path=cwd / "ansible.cfg") 52 | cli.acfg_home = AnsibleCfg(path=home / "ansible.cfg") 53 | cli.acfg_system = AnsibleCfg(path=system / "ansible.cfg") 54 | cli.init_output() 55 | return cli 56 | 57 | 58 | def test_acfg_cwd_new(cli: Cli) -> None: 59 | """Test the creation of a new ansible.cfg in the cwd. 60 | 61 | Args: 62 | cli: A Cli instance from a fixture 63 | """ 64 | test_path = cli.acfg_cwd.path 65 | 66 | assert cli.isolation_check() is True 67 | assert test_path.exists() is True 68 | assert cli.isolation_check() is True 69 | assert test_path.read_text() == f"[defaults]\n{CP}\n" 70 | assert cli.acfg_trusted == test_path 71 | 72 | 73 | def test_acfg_cwd_modified(cli: Cli) -> None: 74 | """Test the update of an existing ansible.cfg in the cwd. 75 | 76 | Args: 77 | cli: A Cli instance from a fixture 78 | """ 79 | test_path = cli.acfg_cwd.path 80 | 81 | with test_path.open(mode="w") as f: 82 | f.write("# comment\n") 83 | 84 | expected = f"[defaults]\n{CP}\n# comment\n" 85 | 86 | assert cli.isolation_check() is True 87 | assert test_path.read_text() == expected 88 | 89 | with test_path.open(mode="w") as f: 90 | f.write("[defaults]\n") 91 | f.write("collections_paths = /tmp/collections\n") 92 | f.write("# comment\n") 93 | 94 | assert cli.isolation_check() is True 95 | assert test_path.read_text() == expected 96 | assert cli.acfg_trusted == test_path 97 | 98 | 99 | def test_acfg_home_modified(cli: Cli) -> None: 100 | """Test the update of an existing ansible.cfg in $HOME. 101 | 102 | Args: 103 | cli: A Cli instance from a fixture 104 | """ 105 | with cli.acfg_home.path.open(mode="w") as f: 106 | f.write("# comment\n") 107 | 108 | expected = f"[defaults]\n{CP}\n# comment\n" 109 | 110 | test_path = cli.acfg_home.path 111 | 112 | assert cli.isolation_check() is True 113 | assert test_path.read_text() == expected 114 | assert cli.acfg_trusted == test_path 115 | cli.acfg_trusted = test_path.parent 116 | 117 | with test_path.open(mode="w") as f: 118 | f.write("[defaults]\n") 119 | f.write("collections_paths = /tmp/collections\n") 120 | f.write("# comment\n") 121 | 122 | assert cli.isolation_check() is True 123 | assert test_path.read_text() == expected 124 | assert cli.acfg_trusted == test_path 125 | stat = cli.acfg_trusted.stat().st_mtime 126 | assert cli.isolation_check() is True 127 | assert cli.acfg_trusted.stat().st_mtime == stat 128 | 129 | 130 | def test_acfg_system_ok(cli: Cli) -> None: 131 | """Test collections_path in a system ansible.cfg. 132 | 133 | Args: 134 | cli: A Cli instance from a fixture 135 | """ 136 | test_path = cli.acfg_system.path 137 | 138 | with test_path.open(mode="w") as f: 139 | f.write(f"[defaults]\n{CP}\n") 140 | 141 | assert cli.isolation_check() is True 142 | assert cli.acfg_trusted == test_path 143 | 144 | 145 | def test_isolation_none(cli: Cli) -> None: 146 | """Test isolation_none method. 
147 | 148 | Args: 149 | cli: A Cli instance from a fixture 150 | """ 151 | cli.args.isolation_mode = "none" 152 | assert cli.isolation_check() is True 153 | assert cli.acfg_trusted is None 154 | 155 | 156 | def test_invalid_isolation_mode(cli: Cli) -> None: 157 | """Test invalid isolation mode. 158 | 159 | Args: 160 | cli: A Cli instance from a fixture 161 | """ 162 | cli.args.isolation_mode = "invalid" 163 | assert cli.isolation_check() is False 164 | assert cli.acfg_trusted is None 165 | 166 | 167 | def test_isolation_cfg_with_env_var(cli: Cli, monkeypatch: pytest.MonkeyPatch) -> None: 168 | """Test isolation_cfg method with ANSIBLE_CONFIG environment variable. 169 | 170 | Args: 171 | cli: A Cli instance from a fixture 172 | monkeypatch: Pytest fixture for monkey patching. 173 | """ 174 | monkeypatch.setenv("ANSIBLE_CONFIG", str(cli.acfg_cwd.path)) 175 | 176 | assert cli.isolation_check() is False 177 | assert cli.acfg_trusted is None 178 | -------------------------------------------------------------------------------- /tests/unit/test_cli_precedence.py: -------------------------------------------------------------------------------- 1 | """Test for cli and environment variable precedence.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TypedDict 6 | 7 | import pytest 8 | 9 | from ansible_dev_environment.cli import Cli 10 | 11 | 12 | class Data(TypedDict): 13 | """Test data dictionary. 14 | 15 | Attributes: 16 | attr: Attribute name. 17 | args: Command line argument. 18 | env: Environment variable name. 19 | cli_expected: Expected value from command line argument. 20 | env_expected: Expected value from environment variable. 21 | 22 | """ 23 | 24 | attr: str 25 | args: str 26 | env: str 27 | cli_expected: bool | int | str 28 | env_expected: bool | int | str 29 | 30 | 31 | params: list[Data] = [ 32 | { 33 | "attr": "ansi", 34 | "args": "--ansi", 35 | "env": "NO_COLOR", 36 | "env_expected": False, 37 | "cli_expected": True, 38 | }, 39 | { 40 | "attr": "seed", 41 | "args": "--seed", 42 | "env": "ADE_SEED", 43 | "env_expected": False, 44 | "cli_expected": True, 45 | }, 46 | {"attr": "verbose", "args": "-vvv", "env": "ADE_VERBOSE", "env_expected": 2, "cli_expected": 3}, 47 | {"attr": "uv", "args": "--uv", "env": "ADE_UV", "env_expected": False, "cli_expected": True}, 48 | ] 49 | 50 | 51 | @pytest.mark.parametrize("data", params, ids=lambda d: d["attr"]) 52 | def test_cli_precedence_flag( 53 | data: Data, 54 | monkeypatch: pytest.MonkeyPatch, 55 | ) -> None: 56 | """Test CLI precedence over environment variables. 57 | 58 | Args: 59 | data: Test data dictionary. 60 | monkeypatch: Pytest fixture. 
61 |
62 |     """
63 |     cli = Cli()
64 |
65 |     args = ["ade", "install"]
66 |     monkeypatch.setenv(data["env"], str(data["env_expected"]))
67 |     monkeypatch.setattr("sys.argv", args)
68 |
69 |     cli.parse_args()
70 |
71 |     assert getattr(cli.args, data["attr"]) == data["env_expected"]
72 |
73 |     args.append(data["args"])
74 |     cli.parse_args()
75 |     assert getattr(cli.args, data["attr"]) == data["cli_expected"]
76 |
--------------------------------------------------------------------------------
/tests/unit/test_collection.py:
--------------------------------------------------------------------------------
1 | """Tests for the collection module."""
2 |
3 | from __future__ import annotations
4 |
5 | from argparse import Namespace
6 | from typing import TYPE_CHECKING
7 |
8 | import pytest
9 |
10 | from ansible_dev_environment.collection import Collection, get_galaxy
11 | from ansible_dev_environment.config import Config
12 | from ansible_dev_environment.utils import TermFeatures
13 |
14 |
15 | if TYPE_CHECKING:
16 |     from pathlib import Path
17 |
18 |     from ansible_dev_environment.output import Output
19 |
20 |
21 | @pytest.mark.usefixtures("_wide_console")
22 | def test_get_galaxy_missing(
23 |     tmp_path: Path,
24 |     output: Output,
25 |     capsys: pytest.CaptureFixture[str],
26 | ) -> None:
27 |     """Test when the galaxy.yml file is missing.
28 |
29 |     Args:
30 |         tmp_path: Temporary directory.
31 |         output: Output class object.
32 |         capsys: Pytest fixture
33 |     """
34 |     config = Config(
35 |         args=Namespace(),
36 |         term_features=TermFeatures(color=False, links=False),
37 |         output=output,
38 |     )
39 |     collection = Collection(
40 |         config=config,
41 |         path=tmp_path,
42 |         cname="utils",
43 |         cnamespace="ansible",
44 |         local=False,
45 |         original="ansible.utils",
46 |         specifier="",
47 |         opt_deps="",
48 |         csource=[],
49 |     )
50 |     with pytest.raises(SystemExit):
51 |         get_galaxy(collection, output)
52 |
53 |     captured = capsys.readouterr()
54 |     assert f"Failed to find {tmp_path / 'galaxy.yml'} in {tmp_path}\n" in captured.err
55 |
56 |
57 | @pytest.mark.usefixtures("_wide_console")
58 | def test_get_galaxy_no_meta(
59 |     tmp_path: Path,
60 |     output: Output,
61 |     capsys: pytest.CaptureFixture[str],
62 | ) -> None:
63 |     """Test when the galaxy.yml file is missing the name/namespace.
64 |
65 |     Args:
66 |         tmp_path: Temporary directory.
67 |         output: Output class object.
68 |         capsys: Pytest fixture
69 |     """
70 |     (tmp_path / "galaxy.yml").write_text("corrupt: yaml\n")
71 |     config = Config(
72 |         args=Namespace(),
73 |         term_features=TermFeatures(color=False, links=False),
74 |         output=output,
75 |     )
76 |     collection = Collection(
77 |         config=config,
78 |         path=tmp_path,
79 |         cname="utils",
80 |         cnamespace="ansible",
81 |         local=False,
82 |         original="ansible.utils",
83 |         specifier="",
84 |         opt_deps="",
85 |         csource=[],
86 |     )
87 |     with pytest.raises(SystemExit):
88 |         get_galaxy(collection, output)
89 |
90 |     captured = capsys.readouterr()
91 |     assert (
92 |         f"Failed to find collection name in {tmp_path / 'galaxy.yml'}: 'namespace'\n"
93 |         in captured.err
94 |     )
95 |
96 |
97 | @pytest.mark.usefixtures("_wide_console")
98 | def test_get_galaxy_corrupt(
99 |     tmp_path: Path,
100 |     output: Output,
101 |     capsys: pytest.CaptureFixture[str],
102 | ) -> None:
103 |     """Test when the galaxy.yml file is corrupt.
104 |
105 |     Args:
106 |         tmp_path: Temporary directory.
107 |         output: Output class object.
108 | capsys: Pytest fixture 109 | """ 110 | (tmp_path / "galaxy.yml").write_text(",") 111 | config = Config( 112 | args=Namespace(), 113 | term_features=TermFeatures(color=False, links=False), 114 | output=output, 115 | ) 116 | collection = Collection( 117 | config=config, 118 | path=tmp_path, 119 | cname="utils", 120 | cnamespace="ansible", 121 | local=False, 122 | original="ansible.utils", 123 | specifier="", 124 | opt_deps="", 125 | csource=[], 126 | ) 127 | with pytest.raises(SystemExit): 128 | get_galaxy(collection, output) 129 | 130 | captured = capsys.readouterr() 131 | assert "Failed to load yaml file:" in captured.err 132 | -------------------------------------------------------------------------------- /tests/unit/test_inspector.py: -------------------------------------------------------------------------------- 1 | """Tests for the inspector module.""" 2 | 3 | from __future__ import annotations 4 | 5 | import copy 6 | import importlib 7 | import json 8 | import re 9 | import sys 10 | 11 | from typing import TYPE_CHECKING 12 | 13 | from ansible_dev_environment.subcommands import inspector 14 | 15 | 16 | if TYPE_CHECKING: 17 | import pytest 18 | 19 | from ansible_dev_environment.config import Config 20 | 21 | 22 | def test_output_no_color(session_venv: Config, capsys: pytest.CaptureFixture[str]) -> None: 23 | """Test the inspector output. 24 | 25 | Args: 26 | session_venv: The configuration object for the venv. 27 | capsys: Pytest capture fixture. 28 | """ 29 | _inspector = inspector.Inspector(config=session_venv, output=session_venv._output) 30 | _inspector.run() 31 | captured = capsys.readouterr() 32 | data = json.loads(captured.out) 33 | assert "ansible.posix" in data 34 | assert "ansible.scm" in data 35 | assert "ansible.utils" in data 36 | 37 | 38 | def test_output_color( 39 | session_venv: Config, 40 | capsys: pytest.CaptureFixture[str], 41 | monkeypatch: pytest.MonkeyPatch, 42 | ) -> None: 43 | """Test the inspector output. 44 | 45 | Args: 46 | session_venv: The configuration object for the venv. 47 | capsys: Pytest capture fixture. 48 | monkeypatch: Pytest monkeypatch fixture. 49 | """ 50 | monkeypatch.setenv("FORCE_COLOR", "1") 51 | config = copy.deepcopy(session_venv) 52 | config.term_features.color = True 53 | _inspector = inspector.Inspector(config=config, output=session_venv._output) 54 | _inspector.run() 55 | captured = capsys.readouterr() 56 | assert captured.out.startswith("\x1b") 57 | ansi_escape = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -\/]*[@-~]") 58 | no_ansi = ansi_escape.sub("", captured.out) 59 | data = json.loads(no_ansi) 60 | assert "ansible.posix" in data 61 | assert "ansible.scm" in data 62 | assert "ansible.utils" in data 63 | 64 | 65 | def test_no_rich( 66 | session_venv: Config, 67 | capsys: pytest.CaptureFixture[str], 68 | monkeypatch: pytest.MonkeyPatch, 69 | ) -> None: 70 | """Test the inspector output when rich is not available. 71 | 72 | Args: 73 | session_venv: The configuration object for the venv. 74 | capsys: Pytest capture fixture. 75 | monkeypatch: Pytest monkeypatch fixture. 
76 | """ 77 | with monkeypatch.context() as monkey_rich: 78 | monkey_rich.setitem(sys.modules, "pip._vendor.rich", None) 79 | importlib.reload(inspector) 80 | assert inspector.HAS_RICH is False 81 | 82 | _inspector = inspector.Inspector(config=session_venv, output=session_venv._output) 83 | _inspector.run() 84 | 85 | importlib.reload(inspector) 86 | 87 | captured = capsys.readouterr() 88 | data = json.loads(captured.out) 89 | assert "ansible.posix" in data 90 | assert "ansible.scm" in data 91 | assert "ansible.utils" in data 92 | -------------------------------------------------------------------------------- /tests/unit/test_lister.py: -------------------------------------------------------------------------------- 1 | """Test the lister module.""" 2 | 3 | from __future__ import annotations 4 | 5 | import copy 6 | import tarfile 7 | 8 | from typing import TYPE_CHECKING 9 | 10 | import yaml 11 | 12 | from ansible_dev_environment.arg_parser import parse 13 | from ansible_dev_environment.config import Config 14 | from ansible_dev_environment.subcommands.installer import Installer 15 | from ansible_dev_environment.subcommands.lister import Lister 16 | from ansible_dev_environment.utils import JSONVal, collect_manifests 17 | 18 | 19 | if TYPE_CHECKING: 20 | from pathlib import Path 21 | 22 | import pytest 23 | 24 | from ansible_dev_environment.output import Output 25 | 26 | 27 | def test_success(session_venv: Config, capsys: pytest.CaptureFixture[str]) -> None: 28 | """Test the lister. 29 | 30 | Args: 31 | session_venv: The venv configuration. 32 | capsys: The capsys fixture. 33 | 34 | """ 35 | lister = Lister(config=session_venv, output=session_venv._output) 36 | lister.run() 37 | captured = capsys.readouterr() 38 | assert "ansible.scm" in captured.out 39 | assert "ansible.utils" in captured.out 40 | assert "ansible.posix" in captured.out 41 | 42 | 43 | def test_collection_info_corrupt( 44 | session_venv: Config, 45 | monkeypatch: pytest.MonkeyPatch, 46 | capsys: pytest.CaptureFixture[str], 47 | ) -> None: 48 | """Test the lister with corrupt collection info. 49 | 50 | Args: 51 | session_venv: The venv configuration. 52 | monkeypatch: The monkeypatch fixture. 53 | capsys: The capsys fixture. 54 | """ 55 | orig_collect_manifests = collect_manifests 56 | 57 | def mock_collect_manifests(target: Path, venv_cache_dir: Path) -> dict[str, dict[str, JSONVal]]: 58 | """Mock the manifest collection. 59 | 60 | Args: 61 | target: The target directory. 62 | venv_cache_dir: The venv cache directory. 63 | 64 | Returns: 65 | dict: The collection manifests. 66 | 67 | """ 68 | collections = orig_collect_manifests(target=target, venv_cache_dir=venv_cache_dir) 69 | collections["ansible.utils"]["collection_info"] = "This is not a valid dict." 70 | return collections 71 | 72 | monkeypatch.setattr( 73 | "ansible_dev_environment.subcommands.lister.collect_manifests", 74 | mock_collect_manifests, 75 | ) 76 | 77 | lister = Lister(config=session_venv, output=session_venv._output) 78 | lister.run() 79 | captured = capsys.readouterr() 80 | assert "Collection ansible.utils has malformed metadata." in captured.err 81 | 82 | 83 | def test_collection_info_collection_corrupt( 84 | session_venv: Config, 85 | monkeypatch: pytest.MonkeyPatch, 86 | capsys: pytest.CaptureFixture[str], 87 | ) -> None: 88 | """Test the lister with corrupt collection info for collections. 89 | 90 | Args: 91 | session_venv: The venv configuration. 92 | monkeypatch: The monkeypatch fixture. 93 | capsys: The capsys fixture. 
94 | """ 95 | orig_collect_manifests = collect_manifests 96 | 97 | def mock_collect_manifests(target: Path, venv_cache_dir: Path) -> dict[str, dict[str, JSONVal]]: 98 | """Mock the manifest collection. 99 | 100 | Args: 101 | target: The target directory. 102 | venv_cache_dir: The venv cache directory. 103 | 104 | Returns: 105 | dict: The collection manifests. 106 | 107 | """ 108 | collections = orig_collect_manifests(target=target, venv_cache_dir=venv_cache_dir) 109 | assert isinstance(collections["ansible.utils"]["collection_info"], dict) 110 | assert isinstance(collections["ansible.scm"]["collection_info"], dict) 111 | assert isinstance(collections["ansible.posix"]["collection_info"], dict) 112 | collections["ansible.utils"]["collection_info"]["name"] = True 113 | collections["ansible.scm"]["collection_info"]["namespace"] = True 114 | collections["ansible.posix"]["collection_info"]["version"] = True 115 | return collections 116 | 117 | monkeypatch.setattr( 118 | "ansible_dev_environment.subcommands.lister.collect_manifests", 119 | mock_collect_manifests, 120 | ) 121 | 122 | lister = Lister(config=session_venv, output=session_venv._output) 123 | lister.run() 124 | captured = capsys.readouterr() 125 | assert "Collection ansible.utils has malformed metadata." in captured.err 126 | assert "Collection ansible.utils has malformed metadata." in captured.err 127 | assert "Collection ansible.scm has malformed metadata." in captured.err 128 | 129 | 130 | def test_broken_link( 131 | session_venv: Config, 132 | monkeypatch: pytest.MonkeyPatch, 133 | capsys: pytest.CaptureFixture[str], 134 | ) -> None: 135 | """Test the lister with corrupt repository URL. 136 | 137 | Args: 138 | session_venv: The venv configuration. 139 | monkeypatch: The monkeypatch fixture. 140 | capsys: The capsys fixture. 141 | """ 142 | config = copy.deepcopy(session_venv) 143 | config._output.term_features.links = True 144 | orig_collect_manifests = collect_manifests 145 | 146 | def mock_collect_manifests(target: Path, venv_cache_dir: Path) -> dict[str, dict[str, JSONVal]]: 147 | """Mock the manifest collection. 148 | 149 | Args: 150 | target: The target directory. 151 | venv_cache_dir: The venv cache directory. 152 | 153 | Returns: 154 | dict: The collection manifests. 155 | 156 | """ 157 | collections = orig_collect_manifests(target=target, venv_cache_dir=venv_cache_dir) 158 | assert isinstance(collections["ansible.utils"]["collection_info"], dict) 159 | collections["ansible.utils"]["collection_info"]["repository"] = True 160 | return collections 161 | 162 | monkeypatch.setattr( 163 | "ansible_dev_environment.subcommands.lister.collect_manifests", 164 | mock_collect_manifests, 165 | ) 166 | 167 | lister = Lister(config=session_venv, output=session_venv._output) 168 | lister.run() 169 | captured = capsys.readouterr() 170 | assert "Collection ansible.utils has malformed metadata." in captured.err 171 | 172 | 173 | def test_editable( 174 | tmp_path: Path, 175 | output: Output, 176 | galaxy_cache: Path, 177 | capsys: pytest.CaptureFixture[str], 178 | monkeypatch: pytest.MonkeyPatch, 179 | ) -> None: 180 | """Test the uninstaller against an editable collection. 181 | 182 | Because the galaxy tar doesn't have a galaxy.yml file, construct one. 183 | Uninstall twice to catch them not found error. Use the ansible.posix 184 | collection since it has no deps. 185 | 186 | Args: 187 | tmp_path: The tmp_path fixture. 188 | output: The output fixture. 189 | galaxy_cache: The galaxy_cache fixture. 190 | capsys: The capsys fixture. 
191 | monkeypatch: The monkeypatch fixture. 192 | 193 | """ 194 | src_dir = tmp_path / "ansible.posix" 195 | tar_file_path = next(galaxy_cache.glob("ansible-posix*")) 196 | with tarfile.open(tar_file_path, "r") as tar: 197 | try: 198 | tar.extractall(src_dir, filter="data") 199 | except TypeError: 200 | tar.extractall(src_dir) # noqa: S202 201 | galaxy_contents = { 202 | "authors": "author", 203 | "name": "posix", 204 | "namespace": "ansible", 205 | "readme": "readme", 206 | "version": "1.0.0", 207 | } 208 | yaml.dump(galaxy_contents, (src_dir / "galaxy.yml").open("w")) 209 | 210 | monkeypatch.setattr( 211 | "sys.argv", 212 | [ 213 | "ade", 214 | "install", 215 | "--editable", 216 | str(src_dir), 217 | "--lf", 218 | str(tmp_path / "ade.log"), 219 | "--venv", 220 | str(tmp_path / "venv"), 221 | ], 222 | ) 223 | args = parse() 224 | config = Config(args=args, output=output, term_features=output.term_features) 225 | config.init() 226 | installer = Installer(config=config, output=config._output) 227 | installer.run() 228 | 229 | lister = Lister(config=config, output=config._output) 230 | lister.run() 231 | captured = capsys.readouterr() 232 | assert "ansible.posix" in captured.out 233 | assert str(tmp_path / "ansible.posix") in captured.out 234 | -------------------------------------------------------------------------------- /tests/unit/test_main.py: -------------------------------------------------------------------------------- 1 | """Test the main file.""" 2 | 3 | from __future__ import annotations 4 | 5 | import runpy 6 | 7 | import pytest 8 | 9 | 10 | def test_main(capsys: pytest.CaptureFixture[str], monkeypatch: pytest.MonkeyPatch) -> None: 11 | """Test the main file. 12 | 13 | Args: 14 | capsys: Capture stdout and stderr 15 | monkeypatch: Pytest monkeypatch fixture. 16 | """ 17 | monkeypatch.setattr("sys.argv", ["ansible-dev-environment"]) 18 | with pytest.raises(SystemExit): 19 | runpy.run_module("ansible_dev_environment", run_name="__main__", alter_sys=True) 20 | 21 | captured = capsys.readouterr() 22 | assert "the following arguments are required" in captured.err 23 | -------------------------------------------------------------------------------- /tests/unit/test_output.py: -------------------------------------------------------------------------------- 1 | """Test the output module.""" 2 | 3 | from __future__ import annotations 4 | 5 | import os 6 | 7 | from typing import TYPE_CHECKING 8 | 9 | import pytest 10 | 11 | from ansible_dev_environment.output import Color, Level, Msg, Output, console_width 12 | from ansible_dev_environment.utils import TermFeatures 13 | 14 | 15 | if TYPE_CHECKING: 16 | from pathlib import Path 17 | 18 | 19 | @pytest.mark.parametrize( 20 | argnames=("width", "expected"), 21 | argvalues=((79, 79), (131, 81), (133, 132)), 22 | ) 23 | def test_console_width(width: int, expected: int, monkeypatch: pytest.MonkeyPatch) -> None: 24 | """Test the console width function.""" 25 | 26 | def mock_get_terminal_size( 27 | fallback: tuple[int, int] = (80, 24), # noqa: ARG001 28 | ) -> tuple[int, int]: 29 | """Mock the get_terminal_size function. 30 | 31 | Args: 32 | fallback: Default terminal size. 33 | 34 | Returns: 35 | Mocked terminal size. 
36 | """ 37 | return os.terminal_size((width, 24)) 38 | 39 | monkeypatch.setattr("shutil.get_terminal_size", mock_get_terminal_size) 40 | 41 | monkeypatch.delenv("COLUMNS", raising=False) 42 | 43 | assert console_width() == expected 44 | 45 | 46 | @pytest.mark.parametrize( 47 | "params", 48 | ( 49 | (Level.CRITICAL, Color.BRIGHT_RED), 50 | (Level.DEBUG, Color.GREY), 51 | (Level.ERROR, Color.RED), 52 | (Level.HINT, Color.CYAN), 53 | (Level.INFO, Color.MAGENTA), 54 | (Level.NOTE, Color.GREEN), 55 | (Level.WARNING, Color.YELLOW), 56 | ), 57 | ids=("critical", "debug", "error", "hint", "info", "note", "warning"), 58 | ) 59 | def test_color_mapping(params: tuple[Level, Color]) -> None: 60 | """Test the color mapping for Msg in the output module. 61 | 62 | Args: 63 | params: Tuple of Level and Color. 64 | """ 65 | assert Msg(message="", prefix=params[0]).color == str(params[1]) 66 | 67 | 68 | @pytest.mark.parametrize("level", ("info", "warning", "error", "debug", "critical", "hint", "note")) 69 | def test_console_output(level: str, capsys: pytest.CaptureFixture[str], tmp_path: Path) -> None: 70 | """Test the console output function. 71 | 72 | Args: 73 | level: Log level. 74 | capsys: Pytest fixture. 75 | tmp_path: Pytest fixture 76 | """ 77 | output = Output( 78 | log_file=str(tmp_path / "test.log"), 79 | log_level="debug", 80 | log_append="false", 81 | term_features=TermFeatures(color=True, links=True), 82 | verbosity=3, 83 | ) 84 | message = f"{level} message" 85 | msg = Msg(message=message, prefix=getattr(Level, level.upper())) 86 | if level == "critical": 87 | with pytest.raises(SystemExit): 88 | getattr(output, level)(message) 89 | else: 90 | getattr(output, level)(message) 91 | captured = capsys.readouterr() 92 | standard_x = captured.err if level in ("critical", "error") else captured.out 93 | assert standard_x.startswith(msg.color) 94 | assert standard_x.endswith(Color.END + "\n") 95 | assert level.capitalize() in standard_x 96 | assert message in standard_x 97 | 98 | 99 | def test_output_log_exists(tmp_path: Path) -> None: 100 | """Test the log file is reinitialized if append is false. 101 | 102 | Args: 103 | tmp_path: Pytest fixture. 
104 | """ 105 | log_file = tmp_path / "test.log" 106 | log_file.write_text("test") 107 | pre_stat = log_file.stat() 108 | Output( 109 | log_file=str(log_file), 110 | log_level="debug", 111 | log_append="false", 112 | term_features=TermFeatures(color=True, links=True), 113 | verbosity=3, 114 | ) 115 | post_stat = log_file.stat() 116 | assert pre_stat.st_size > 0 117 | assert post_stat.st_size != pre_stat.st_size 118 | assert post_stat.st_size == 0 119 | -------------------------------------------------------------------------------- /tests/unit/test_tree.py: -------------------------------------------------------------------------------- 1 | # cspell:ignore mkey, mfour 2 | """Test the tree generator.""" 3 | 4 | from __future__ import annotations 5 | 6 | from typing import TYPE_CHECKING 7 | 8 | import pytest 9 | 10 | from ansible_dev_environment.tree import Tree 11 | from ansible_dev_environment.utils import TermFeatures 12 | 13 | 14 | if TYPE_CHECKING: 15 | from ansible_dev_environment.tree import JSONVal 16 | 17 | 18 | sample_1: JSONVal = { 19 | "key_one": "one", 20 | "key_two": 42, 21 | "key_three": True, 22 | "key_four": None, 23 | "key_five": ["one", "two", "three"], 24 | "key_six": { 25 | "key_one": "one", 26 | "key_two": 42, 27 | "key_three": True, 28 | "key_four": None, 29 | "key_five": ["one", "two", "three"], 30 | "key_six": { 31 | "key_one": "one", 32 | "key_two": 42, 33 | "key_three": True, 34 | "key_four": None, 35 | "key_five": ["one", "two", "three"], 36 | "key_six": { 37 | "key_one": "one", 38 | "key_two": 42, 39 | "key_three": True, 40 | "key_four": None, 41 | "key_five": ["one", "two", "three"], 42 | }, 43 | }, 44 | }, 45 | "key_seven": [{"foo": [1, 2, 3]}], 46 | "key_eight": [1, 2, 3], 47 | } 48 | 49 | result = r"""key_one 50 | └──one 51 | key_two 52 | └──42 53 | key_three 54 | └──True 55 | key_four 56 | └──None 57 | key_five 58 | ├──one 59 | ├──two 60 | └──three 61 | key_six 62 | ├──key_one 63 | │ └──one 64 | ├──key_two 65 | │ └──42 66 | ├──key_three 67 | │ └──True 68 | ├──key_four 69 | │ └──None 70 | ├──key_five 71 | │ ├──one 72 | │ ├──two 73 | │ └──three 74 | └──key_six 75 | ├──key_one 76 | │ └──one 77 | ├──key_two 78 | │ └──42 79 | ├──key_three 80 | │ └──True 81 | ├──key_four 82 | │ └──None 83 | ├──key_five 84 | │ ├──one 85 | │ ├──two 86 | │ └──three 87 | └──key_six 88 | ├──key_one 89 | │ └──one 90 | ├──key_two 91 | │ └──42 92 | ├──key_three 93 | │ └──True 94 | ├──key_four 95 | │ └──None 96 | └──key_five 97 | ├──one 98 | ├──two 99 | └──three 100 | key_seven 101 | └──foo 102 | ├──1 103 | ├──2 104 | └──3 105 | key_eight 106 | ├──1 107 | ├──2 108 | └──3 109 | """ 110 | 111 | 112 | def test_tree_large() -> None: 113 | """Test the tree generator.""" 114 | term_features = TermFeatures(color=False, links=False) 115 | 116 | assert Tree(obj=sample_1, term_features=term_features).render() == result 117 | 118 | 119 | sample_2: JSONVal = { 120 | "key_one": True, 121 | "key_two": 42, 122 | "key_three": None, 123 | "key_four": "four", 124 | "key_five": [{"a": 1}, {"b": 2}], 125 | } 126 | 127 | expected = [ 128 | "\x1b[34mkey_one\x1b[0m", 129 | "└──\x1b[32mTrue\x1b[0m", 130 | "\x1b[34mkey_two\x1b[0m", 131 | "└──\x1b[32m42\x1b[0m", 132 | "\x1b[34mkey_three\x1b[0m", 133 | "└──\x1b[32mNone\x1b[0m", 134 | "\x1b[34mkey_four\x1b[0m", 135 | "└──\x1b[32mfour\x1b[0m", 136 | "key_five", 137 | "├──\x1b[3m0\x1b[0m", 138 | "│ └──a", 139 | "│ └──1", 140 | "└──\x1b[3m1\x1b[0m", 141 | " └──b", 142 | " └──\x1b[34m\x1b]8;;http://red.ht\x1b\\2\x1b]8;;\x1b\\\x1b[0m", 143 | ] 144 | 145 | 146 | def 
test_tree_color() -> None: 147 | """Test the tree generator.""" 148 | term_features = TermFeatures(color=True, links=True) 149 | tree = Tree(obj=sample_2, term_features=term_features) 150 | tree.blue = ["key_one", "key_two", "key_three", "key_four"] 151 | tree.green = [True, 42, None, "four"] 152 | tree.links = {"2": "http://red.ht"} 153 | rendered = tree.render().splitlines() 154 | assert rendered == expected 155 | 156 | 157 | def test_tree_fail() -> None: 158 | """Test a tree failure.""" 159 | term_features = TermFeatures(color=False, links=False) 160 | tree = Tree(obj=(1, 2, 3), term_features=term_features) # type: ignore[arg-type] 161 | with pytest.raises(TypeError, match="Invalid type "): 162 | tree.render() 163 | -------------------------------------------------------------------------------- /tests/unit/test_treemaker.py: -------------------------------------------------------------------------------- 1 | """Test the treemaker module.""" 2 | 3 | from __future__ import annotations 4 | 5 | from argparse import Namespace 6 | from typing import TYPE_CHECKING 7 | from venv import EnvBuilder 8 | 9 | import pytest 10 | 11 | from ansible_dev_environment.config import Config 12 | from ansible_dev_environment.subcommands.treemaker import TreeMaker, TreeWithReqs, add_python_reqs 13 | 14 | 15 | if TYPE_CHECKING: 16 | from pathlib import Path 17 | 18 | from ansible_dev_environment.output import Output 19 | from ansible_dev_environment.utils import JSONVal 20 | 21 | 22 | class SafeEnvBuilder(EnvBuilder): 23 | """Safer EnvBuilder that defaults symlinks to True.""" 24 | 25 | # pylint: disable=too-many-arguments 26 | def __init__( # noqa: PLR0913 27 | self, 28 | *, 29 | system_site_packages: bool = False, 30 | clear: bool = False, 31 | symlinks: bool = True, 32 | upgrade: bool = False, 33 | with_pip: bool = False, 34 | prompt: str | None = None, 35 | upgrade_deps: bool = False, 36 | ) -> None: 37 | """Ensure that symlinks defaults to True because otherwise it will create broken venvs with tools like pyenv, asdf or mise.""" 38 | super().__init__( 39 | system_site_packages=system_site_packages, 40 | clear=clear, 41 | symlinks=symlinks, 42 | upgrade=upgrade, 43 | with_pip=with_pip, 44 | prompt=prompt, 45 | upgrade_deps=upgrade_deps, 46 | ) 47 | 48 | 49 | def test_tree_empty( 50 | capsys: pytest.CaptureFixture[str], 51 | output: Output, 52 | tmp_path: Path, 53 | ) -> None: 54 | """Test tree_not_dict. 55 | 56 | Args: 57 | capsys: Pytest stdout capture fixture. 58 | output: Output class object. 59 | tmp_path: Pytest fixture. 60 | """ 61 | venv_path = tmp_path / "venv" 62 | env = SafeEnvBuilder() 63 | env.create(venv_path) 64 | 65 | args = Namespace( 66 | venv=venv_path, 67 | verbose=0, 68 | subcommand="tree", 69 | uv=True, 70 | ) 71 | output._verbosity = 0 72 | config = Config(args=args, output=output, term_features=output.term_features) 73 | config.init() 74 | treemaker = TreeMaker(config=config, output=output) 75 | treemaker.run() 76 | captured = capsys.readouterr() 77 | assert captured.out == "\n\n" 78 | assert not captured.err 79 | 80 | 81 | def test_tree_malformed_info( 82 | capsys: pytest.CaptureFixture[str], 83 | monkeypatch: pytest.MonkeyPatch, 84 | output: Output, 85 | tmp_path: Path, 86 | ) -> None: 87 | """Test malformed collection info. 88 | 89 | Args: 90 | capsys: Pytest stdout capture fixture. 91 | monkeypatch: Pytest fixture. 92 | output: Output class object. 93 | tmp_path: Pytest fixture. 
94 | """ 95 | venv_path = tmp_path / "venv" 96 | SafeEnvBuilder().create(venv_path) 97 | 98 | args = Namespace( 99 | venv=venv_path, 100 | verbose=0, 101 | subcommand="tree", 102 | uv=True, 103 | ) 104 | 105 | def collect_manifests( 106 | target: Path, 107 | venv_cache_dir: Path, 108 | ) -> dict[str, dict[str, JSONVal]]: 109 | """Return a malformed collection info. 110 | 111 | Args: 112 | target: Target path. 113 | venv_cache_dir: Venv cache directory. 114 | 115 | Returns: 116 | Collection info. 117 | """ 118 | assert target 119 | assert venv_cache_dir 120 | return { 121 | "collection_one": { 122 | "collection_info": "malformed", 123 | }, 124 | } 125 | 126 | monkeypatch.setattr( 127 | "ansible_dev_environment.subcommands.treemaker.collect_manifests", 128 | collect_manifests, 129 | ) 130 | config = Config(args=args, output=output, term_features=output.term_features) 131 | config.init() 132 | treemaker = TreeMaker(config=config, output=output) 133 | treemaker.run() 134 | captured = capsys.readouterr() 135 | assert "Collection collection_one has malformed metadata." in captured.err 136 | 137 | 138 | def test_tree_malformed_deps( 139 | capsys: pytest.CaptureFixture[str], 140 | monkeypatch: pytest.MonkeyPatch, 141 | output: Output, 142 | tmp_path: Path, 143 | ) -> None: 144 | """Test malformed collection deps. 145 | 146 | Args: 147 | capsys: Pytest stdout capture fixture. 148 | monkeypatch: Pytest fixture. 149 | output: Output class object. 150 | tmp_path: Pytest fixture. 151 | """ 152 | venv_path = tmp_path / "venv" 153 | SafeEnvBuilder().create(venv_path) 154 | 155 | args = Namespace( 156 | venv=venv_path, 157 | verbose=0, 158 | subcommand="tree", 159 | uv=True, 160 | ) 161 | 162 | def collect_manifests( 163 | target: Path, 164 | venv_cache_dir: Path, 165 | ) -> dict[str, dict[str, JSONVal]]: 166 | """Return a malformed collection info. 167 | 168 | Args: 169 | target: Target path. 170 | venv_cache_dir: Venv cache directory. 171 | 172 | Returns: 173 | Collection info. 174 | """ 175 | assert target 176 | assert venv_cache_dir 177 | return { 178 | "collection_one": { 179 | "collection_info": { 180 | "dependencies": "malformed", 181 | }, 182 | }, 183 | } 184 | 185 | monkeypatch.setattr( 186 | "ansible_dev_environment.subcommands.treemaker.collect_manifests", 187 | collect_manifests, 188 | ) 189 | config = Config(args=args, output=output, term_features=output.term_features) 190 | config.init() 191 | treemaker = TreeMaker(config=config, output=output) 192 | treemaker.run() 193 | captured = capsys.readouterr() 194 | assert "Collection collection_one has malformed metadata." in captured.err 195 | 196 | 197 | def test_tree_malformed_deps_not_string( 198 | capsys: pytest.CaptureFixture[str], 199 | monkeypatch: pytest.MonkeyPatch, 200 | output: Output, 201 | tmp_path: Path, 202 | ) -> None: 203 | """Test malformed collection deps. 204 | 205 | Args: 206 | capsys: Pytest stdout capture fixture. 207 | monkeypatch: Pytest fixture. 208 | output: Output class object. 209 | tmp_path: Pytest fixture. 210 | """ 211 | venv_path = tmp_path / "venv" 212 | SafeEnvBuilder().create(venv_path) 213 | 214 | args = Namespace( 215 | venv=venv_path, 216 | verbose=0, 217 | subcommand="tree", 218 | uv=True, 219 | ) 220 | 221 | def collect_manifests( 222 | target: Path, 223 | venv_cache_dir: Path, 224 | ) -> dict[str, dict[str, dict[str, dict[int, int]]]]: 225 | """Return a malformed collection info. 226 | 227 | Args: 228 | target: Target path. 229 | venv_cache_dir: Venv cache directory. 
230 | 231 | Returns: 232 | Collection info. 233 | """ 234 | assert target 235 | assert venv_cache_dir 236 | return { 237 | "collection_one": { 238 | "collection_info": { 239 | "dependencies": {1: 2}, 240 | }, 241 | }, 242 | } 243 | 244 | monkeypatch.setattr( 245 | "ansible_dev_environment.subcommands.treemaker.collect_manifests", 246 | collect_manifests, 247 | ) 248 | config = Config(args=args, output=output, term_features=output.term_features) 249 | config.init() 250 | treemaker = TreeMaker(config=config, output=output) 251 | treemaker.run() 252 | captured = capsys.readouterr() 253 | assert "Collection collection_one has malformed dependency." in captured.err 254 | 255 | 256 | def test_tree_malformed_repo_not_string( 257 | monkeypatch: pytest.MonkeyPatch, 258 | output: Output, 259 | tmp_path: Path, 260 | capsys: pytest.CaptureFixture[str], 261 | ) -> None: 262 | """Test malformed collection repo. 263 | 264 | Args: 265 | monkeypatch: Pytest fixture. 266 | output: Output class object. 267 | tmp_path: Pytest fixture. 268 | capsys: Pytest stdout capture fixture. 269 | """ 270 | venv_path = tmp_path / "venv" 271 | SafeEnvBuilder().create(venv_path) 272 | 273 | args = Namespace(venv=venv_path, verbose=0, subcommand="tree", uv=True) 274 | 275 | def collect_manifests( 276 | target: Path, 277 | venv_cache_dir: Path, 278 | ) -> dict[str, dict[str, JSONVal]]: 279 | """Return a malformed collection info. 280 | 281 | Args: 282 | target: Target path. 283 | venv_cache_dir: Venv cache directory. 284 | 285 | Returns: 286 | Collection info. 287 | """ 288 | assert target 289 | assert venv_cache_dir 290 | return { 291 | "collection_one": { 292 | "collection_info": { 293 | "dependencies": {}, 294 | "repository": True, 295 | }, 296 | }, 297 | } 298 | 299 | monkeypatch.setattr( 300 | "ansible_dev_environment.subcommands.treemaker.collect_manifests", 301 | collect_manifests, 302 | ) 303 | config = Config(args=args, output=output, term_features=output.term_features) 304 | config.init() 305 | treemaker = TreeMaker(config=config, output=output) 306 | treemaker.run() 307 | captured = capsys.readouterr() 308 | assert "Collection collection_one has malformed repository metadata." in captured.err 309 | 310 | 311 | def test_tree_verbose(session_venv: Config, capsys: pytest.CaptureFixture[str]) -> None: 312 | """Test tree verbose output; the session_venv fixture has verbosity 3. 313 | 314 | Args: 315 | session_venv: Pytest fixture. 316 | capsys: Pytest stdout capture fixture. 317 | """ 318 | treemaker = TreeMaker(config=session_venv, output=session_venv._output) 319 | treemaker.run() 320 | captured = capsys.readouterr() 321 | assert "└──python requirements" in captured.out 322 | assert "xmltodict" in captured.out 323 | 324 | 325 | def test_reqs_no_pound( 326 | session_venv: Config, 327 | capsys: pytest.CaptureFixture[str], 328 | monkeypatch: pytest.MonkeyPatch, 329 | ) -> None: 330 | """Test python deps with no pound signs in the line, which cannot be attributed to a collection. 331 | 332 | Args: 333 | session_venv: Pytest fixture. 334 | capsys: Pytest stdout capture fixture. 335 | monkeypatch: Pytest fixture for patching. 336 | """ 337 | 338 | def builder_introspect(config: Config, output: Output) -> None: 339 | """Mock builder introspect. 340 | 341 | Args: 342 | config: The application configuration. 343 | output: The application output object.
344 | """ 345 | assert output 346 | config.discovered_python_reqs.write_text("xmltodict\n") 347 | 348 | monkeypatch.setattr( 349 | "ansible_dev_environment.subcommands.treemaker.builder_introspect", 350 | builder_introspect, 351 | ) 352 | 353 | treemaker = TreeMaker(config=session_venv, output=session_venv._output) 354 | treemaker.run() 355 | captured = capsys.readouterr() 356 | assert "└──python requirements" in captured.out 357 | assert "xmltodict" not in captured.out 358 | 359 | 360 | def test_collection_is_a_list() -> None: 361 | """Confirm a TypeError is raised if the collection isn't a dict.""" 362 | tree_dict: TreeWithReqs = {"test_collection": []} 363 | with pytest.raises(TypeError): 364 | add_python_reqs(tree_dict, "test_collection", ["xmltodict"]) 365 | -------------------------------------------------------------------------------- /tests/unit/test_uninstaller.py: -------------------------------------------------------------------------------- 1 | """Test the uninstaller module.""" 2 | 3 | from __future__ import annotations 4 | 5 | import copy 6 | 7 | from typing import TYPE_CHECKING 8 | 9 | import pytest 10 | 11 | from ansible_dev_environment.arg_parser import parse 12 | from ansible_dev_environment.config import Config 13 | from ansible_dev_environment.subcommands.installer import Installer 14 | from ansible_dev_environment.subcommands.uninstaller import UnInstaller 15 | 16 | 17 | if TYPE_CHECKING: 18 | from pathlib import Path 19 | 20 | from ansible_dev_environment.output import Output 21 | 22 | 23 | def test_many(session_venv: Config, capsys: pytest.CaptureFixture[str]) -> None: 24 | """Test the uninstaller with many collections. 25 | 26 | Args: 27 | session_venv: The session_venv fixture. 28 | capsys: The capsys fixture. 29 | """ 30 | config = copy.deepcopy(session_venv) 31 | config.args.collection_specifier = ["community.general", "ansible.utils"] 32 | uninstaller = UnInstaller(config=config, output=config._output) 33 | with pytest.raises(SystemExit) as exc: 34 | uninstaller.run() 35 | assert exc.value.code == 1 36 | captured = capsys.readouterr() 37 | assert "Only one collection can be uninstalled at a time." in captured.err 38 | 39 | 40 | def test_missing_reqs( 41 | session_venv: Config, 42 | tmp_path: Path, 43 | capsys: pytest.CaptureFixture[str], 44 | ) -> None: 45 | """Test the uninstaller against a missing requirements file. 46 | 47 | Args: 48 | session_venv: The session_venv fixture. 49 | tmp_path: The tmp_path fixture. 50 | capsys: The capsys fixture. 51 | """ 52 | config = copy.deepcopy(session_venv) 53 | config.args.requirement = str(tmp_path / "requirements.yml") 54 | uninstaller = UnInstaller(config=config, output=config._output) 55 | with pytest.raises(SystemExit) as exc: 56 | uninstaller.run() 57 | assert exc.value.code == 1 58 | captured = capsys.readouterr() 59 | assert "Failed to find requirements file" in captured.err 60 | 61 | 62 | def test_editable_uninstall( 63 | tmp_path: Path, 64 | installable_local_collection: Path, 65 | output: Output, 66 | capsys: pytest.CaptureFixture[str], 67 | monkeypatch: pytest.MonkeyPatch, 68 | ) -> None: 69 | """Test the uninstaller against an editable collection. 70 | 71 | Because the galaxy tar doesn't have a galaxy.yml file, construct one. 72 | Uninstall twice to catch the not-found error. Use the ansible.posix 73 | collection since it has no deps. 74 | 75 | Args: 76 | tmp_path: The tmp_path fixture. 77 | installable_local_collection: The installable_local_collection fixture. 78 | output: The output fixture.
79 | capsys: The capsys fixture. 80 | monkeypatch: The monkeypatch fixture. 81 | 82 | """ 83 | monkeypatch.setattr( 84 | "sys.argv", 85 | [ 86 | "ade", 87 | "install", 88 | "--editable", 89 | str(installable_local_collection), 90 | "--lf", 91 | str(tmp_path / "ade.log"), 92 | "--venv", 93 | str(tmp_path / "venv"), 94 | "-vvv", 95 | ], 96 | ) 97 | args = parse() 98 | config = Config(args=args, output=output, term_features=output.term_features) 99 | config.init() 100 | installer = Installer(config=config, output=config._output) 101 | installer.run() 102 | args.collection_specifier = ["ansible.posix"] 103 | uninstaller = UnInstaller(config=config, output=config._output) 104 | uninstaller.run() 105 | uninstaller.run() 106 | captured = capsys.readouterr() 107 | assert "Removed ansible.posix" in captured.out 108 | assert "Failed to find ansible.posix" in captured.out 109 | -------------------------------------------------------------------------------- /tests/unit/test_utils.py: -------------------------------------------------------------------------------- 1 | """Unit test for the utilities module.""" 2 | 3 | from __future__ import annotations 4 | 5 | from argparse import Namespace 6 | from pathlib import Path 7 | 8 | import pytest 9 | 10 | from ansible_dev_environment.collection import ( 11 | Collection, 12 | parse_collection_request, 13 | ) 14 | from ansible_dev_environment.config import Config 15 | from ansible_dev_environment.output import Output 16 | from ansible_dev_environment.utils import ( 17 | TermFeatures, 18 | builder_introspect, 19 | opt_deps_to_files, 20 | str_to_bool, 21 | ) 22 | 23 | 24 | term_features = TermFeatures(color=False, links=False) 25 | 26 | output = Output( 27 | log_file=str(Path.cwd() / "ansible-dev-environment.log"), 28 | log_level="notset", 29 | log_append="true", 30 | term_features=term_features, 31 | verbosity=0, 32 | ) 33 | config = Config( 34 | args=Namespace(), 35 | term_features=term_features, 36 | output=output, 37 | ) 38 | 39 | FIXTURE_DIR = Path(__file__).parent.parent.resolve() / "fixtures" 40 | scenarios = ( 41 | ( 42 | "ansible.utils", 43 | Collection( 44 | config=config, 45 | cname="utils", 46 | cnamespace="ansible", 47 | local=False, 48 | original="ansible.utils", 49 | specifier="", 50 | path=Path(), 51 | opt_deps="", 52 | csource=[], 53 | ), 54 | ), 55 | ( 56 | "ansible.utils:1.0.0", 57 | Collection( 58 | config=config, 59 | cname="utils", 60 | cnamespace="ansible", 61 | specifier=":1.0.0", 62 | local=False, 63 | original="ansible.utils:1.0.0", 64 | path=Path(), 65 | opt_deps="", 66 | csource=[], 67 | ), 68 | ), 69 | ( 70 | "ansible.utils>=1.0.0", 71 | Collection( 72 | config=config, 73 | cname="utils", 74 | cnamespace="ansible", 75 | specifier=">=1.0.0", 76 | local=False, 77 | original="ansible.utils>=1.0.0", 78 | path=Path(), 79 | opt_deps="", 80 | csource=[], 81 | ), 82 | ), 83 | ( 84 | str(FIXTURE_DIR), 85 | Collection( 86 | cname="cname", 87 | cnamespace="cnamespace", 88 | config=config, 89 | local=True, 90 | path=FIXTURE_DIR, 91 | specifier="", 92 | original=str(FIXTURE_DIR), 93 | opt_deps="", 94 | csource=[], 95 | ), 96 | ), 97 | ( 98 | str(FIXTURE_DIR) + "/[test]", 99 | Collection( 100 | cname="cname", 101 | cnamespace="cnamespace", 102 | config=config, 103 | local=True, 104 | opt_deps="test", 105 | path=FIXTURE_DIR, 106 | specifier="", 107 | original=str(FIXTURE_DIR) + "/[test]", 108 | csource=[], 109 | ), 110 | ), 111 | ("/foo/bar", None), 112 | ("abcdefg", None), 113 | ("/12345678901234567890[test]", None), 114 | 
("not_a_collection_name", None), 115 | ) 116 | 117 | 118 | @pytest.mark.parametrize("scenario", scenarios) 119 | def test_parse_collection_request(scenario: tuple[str, Collection | None]) -> None: 120 | """Test that the parse_collection_request function works as expected. 121 | 122 | Args: 123 | scenario: A tuple containing the string to parse and the expected result. 124 | """ 125 | string, spec = scenario 126 | if spec is None: 127 | with pytest.raises(SystemExit): 128 | parse_collection_request(string=string, config=config, output=output) 129 | else: 130 | assert parse_collection_request(string=string, config=config, output=output) == spec 131 | 132 | 133 | def test_builder_found( 134 | tmp_path: Path, 135 | monkeypatch: pytest.MonkeyPatch, 136 | session_venv: Config, 137 | ) -> None: 138 | """Test that builder is found. 139 | 140 | Args: 141 | tmp_path: A temporary path 142 | monkeypatch: The pytest Monkeypatch fixture 143 | session_venv: The session venv 144 | 145 | """ 146 | 147 | @property # type: ignore[misc] 148 | def cache_dir(_self: Config) -> Path: 149 | """Return a temporary cache directory. 150 | 151 | Args: 152 | _self: The Config object 153 | 154 | Returns: 155 | A temporary cache directory. 156 | """ 157 | return tmp_path 158 | 159 | monkeypatch.setattr(Config, "cache_dir", cache_dir) 160 | 161 | args = Namespace( 162 | venv=session_venv.venv, 163 | system_site_packages=False, 164 | verbose=0, 165 | subcommand="check", 166 | uv=True, 167 | ) 168 | 169 | cfg = Config( 170 | args=args, 171 | term_features=term_features, 172 | output=output, 173 | ) 174 | cfg.init() 175 | 176 | builder_introspect(cfg, output) 177 | 178 | assert cfg.discovered_bindep_reqs.exists() is True 179 | assert cfg.discovered_python_reqs.exists() is True 180 | 181 | 182 | def test_str_to_bool() -> None: 183 | """Test the str_to_bool function. 184 | 185 | This function tests the conversion of string values to boolean values. 186 | """ 187 | assert str_to_bool("true") is True 188 | assert str_to_bool("True") is True 189 | assert str_to_bool("1") is True 190 | assert str_to_bool("yes") is True 191 | assert str_to_bool("y") is True 192 | assert str_to_bool("on") is True 193 | 194 | assert str_to_bool("false") is False 195 | assert str_to_bool("False") is False 196 | assert str_to_bool("0") is False 197 | assert str_to_bool("no") is False 198 | assert str_to_bool("n") is False 199 | assert str_to_bool("off") is False 200 | 201 | assert str_to_bool("anything else") is None 202 | 203 | 204 | def test_opt_deps_to_files(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None: 205 | """Test the opt_deps_to_files function. 206 | 207 | Args: 208 | tmp_path: A temporary path 209 | capsys: The pytest LogCaptureFixture 210 | """ 211 | # Create a temporary file with some content 212 | f1 = tmp_path / "test-requirements.txt" 213 | f1.touch() 214 | f2 = tmp_path / "requirements-dev.txt" 215 | f2.touch() 216 | 217 | collection = Collection( 218 | config=config, 219 | cname="cname", 220 | cnamespace="cnamespace", 221 | local=True, 222 | path=tmp_path, 223 | specifier="", 224 | original=str(tmp_path), 225 | opt_deps="test,dev,foo", 226 | csource=[], 227 | ) 228 | 229 | result = opt_deps_to_files(collection, output) 230 | 231 | captured = capsys.readouterr() 232 | 233 | assert result[0] == f1 234 | assert result[1] == f2 235 | assert "Error: Failed to find optional dependency file for 'foo'." 
in captured.err 236 | -------------------------------------------------------------------------------- /tools/report-coverage: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | coverage combine -q "--data-file=${TOX_ENV_DIR}/.coverage" "${TOX_ENV_DIR}"/.coverage.* 4 | coverage xml "--data-file=${TOX_ENV_DIR}/.coverage" -o "${TOX_ENV_DIR}/coverage.xml" --ignore-errors --fail-under=0 5 | COVERAGE_FILE="${TOX_ENV_DIR}/.coverage" coverage lcov --fail-under=0 --ignore-errors -q 6 | COVERAGE_FILE="${TOX_ENV_DIR}/.coverage" coverage report --fail-under=0 --ignore-errors 7 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | requires = 3 | setuptools>=65.3 4 | tox>=4.25 5 | tox-extra>=2.1 6 | tox-uv>=1.25 7 | env_list = 8 | py 9 | deps 10 | docs 11 | lint 12 | devel 13 | pkg 14 | skip_missing_interpreters = true 15 | 16 | [testenv] 17 | description = Run pytest 18 | devel: with development dependencies 19 | package = editable 20 | deps = 21 | devel: ansible-core@ git+https://github.com/ansible/ansible.git 22 | extras = 23 | test 24 | pass_env = 25 | ANSIBLE_* 26 | CI 27 | CONTAINER_* 28 | DOCKER_* 29 | GITHUB_* 30 | HOME 31 | PYTEST_* 32 | SSH_AUTH_SOCK 33 | TERM 34 | USER 35 | set_env = 36 | COVERAGE_COMBINED = {envdir}/.coverage 37 | COVERAGE_FILE = {env:COVERAGE_FILE:{env_dir}/.coverage.{envname}} 38 | COVERAGE_PROCESS_START = {toxinidir}/pyproject.toml 39 | FORCE_COLOR = 1 40 | PIP_CONSTRAINT = {toxinidir}/.config/constraints.txt 41 | PRE_COMMIT_COLOR = always 42 | TERM = xterm-256color 43 | UV_CONSTRAINT = {toxinidir}/.config/constraints.txt 44 | deps, devel: PIP_CONSTRAINT = /dev/null 45 | deps, devel: UV_CONSTRAINT = /dev/null 46 | commands_pre = 47 | sh -c "rm -f {envdir}/.coverage* 2>/dev/null || true" 48 | commands = 49 | python -c 'import pathlib; pathlib.Path("{env_site_packages_dir}/cov.pth").write_text("import coverage; coverage.process_startup()")' 50 | coverage run -m pytest {posargs:-n auto --junitxml=./junit.xml} 51 | commands_post = 52 | {py,py310,py311,py312,py313}: ./tools/report-coverage 53 | allowlist_externals = 54 | ./tools/report-coverage 55 | git 56 | rm 57 | sh 58 | 59 | [testenv:deps] 60 | description = Bump all dependencies 61 | base_python = python3.11 62 | skip_install = true 63 | deps = 64 | {[testenv:lint]deps} 65 | extras = 66 | set_env = 67 | PIP_CONSTRAINT = /dev/null 68 | commands_pre = 69 | commands = 70 | -pre-commit run --all-files --show-diff-on-failure --hook-stage manual deps 71 | -pre-commit autoupdate 72 | git diff --exit-code 73 | env_dir = {toxworkdir}/lint 74 | 75 | [testenv:docs] 76 | description = Builds docs 77 | package = editable 78 | skip_install = false 79 | extras = 80 | docs 81 | set_env = 82 | NO_COLOR = 1 83 | TERM = dump 84 | commands = 85 | mkdocs build --strict --site-dir=_readthedocs/html/ {posargs:} 86 | 87 | [testenv:lint] 88 | description = Enforce quality standards under {basepython} 89 | skip_install = true 90 | deps = 91 | pre-commit>=4.0.1 92 | pre-commit-uv>=4.1.4 93 | setuptools>=51.1.1 94 | set_env = 95 | PIP_CONSTRAINT = /dev/null 96 | commands = 97 | pre-commit run --show-diff-on-failure --all-files 98 | 99 | [testenv:pkg] 100 | description = 101 | Do packaging/distribution 102 | skip_install = true 103 | deps = 104 | build>=0.9 105 | twine >= 4.0.2 # pyup: ignore 106 | set_env = 107 | commands = 108 | rm -rfv 
{toxinidir}/dist/ 109 | python -m build --outdir {toxinidir}/dist/ {toxinidir} 110 | sh -c "python -m twine check --strict {toxinidir}/dist/*" 111 | --------------------------------------------------------------------------------
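
A minimal usage sketch, assuming tox >= 4.25 is installed and the repository root is the working directory: the environment names come from env_list in tox.ini above, and anything after "--" is forwarded to pytest as posargs in place of the default "-n auto --junitxml=./junit.xml".

    tox -e py -- -n auto   # run the test suite under coverage, per the [testenv] commands
    tox -e lint            # run the pre-commit checks from [testenv:lint]
    tox -e docs            # build the documentation with mkdocs --strict
    tox -e pkg             # build the distribution and run twine check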