├── .config
├── constraints.txt
├── pydoclint-baseline.txt
├── python3-ansible-compat.spec
├── requirements-docs.in
├── requirements-test.in
└── requirements.in
├── .git_archival.txt
├── .gitattributes
├── .github
├── CODEOWNERS
├── CODE_OF_CONDUCT.md
├── ISSUE_TEMPLATE
│ └── bug_report.md
├── dependabot.yml
├── release-drafter.yml
└── workflows
│ ├── ack.yml
│ ├── push.yml
│ ├── release.yml
│ └── tox.yml
├── .gitignore
├── .packit.yaml
├── .pre-commit-config.yaml
├── .prettierignore
├── .prettierrc.yaml
├── .python-version
├── .readthedocs.yml
├── .sonarcloud.properties
├── .taplo.toml
├── .vscode
├── extensions.json
└── settings.json
├── .yamllint
├── LICENSE
├── README.md
├── ansible.cfg
├── codecov.yml
├── docs
├── api.md
├── images
│ ├── favicon.ico
│ ├── logo.png
│ └── logo.svg
└── index.md
├── examples
├── reqs_broken
│ └── requirements.yml
├── reqs_v1
│ └── requirements.yml
└── reqs_v2
│ ├── community-molecule-0.1.0.tar.gz
│ └── requirements.yml
├── mise.toml
├── mkdocs.yml
├── package-lock.json
├── package.json
├── pyproject.toml
├── src
└── ansible_compat
│ ├── __init__.py
│ ├── config.py
│ ├── constants.py
│ ├── errors.py
│ ├── loaders.py
│ ├── ports.py
│ ├── prerun.py
│ ├── py.typed
│ ├── runtime.py
│ ├── schema.py
│ └── types.py
├── test
├── __init__.py
├── assets
│ ├── galaxy_paths
│ │ ├── .bar
│ │ │ └── galaxy.yml
│ │ └── foo
│ │ │ └── galaxy.yml
│ ├── requirements-invalid-collection.yml
│ ├── requirements-invalid-role.yml
│ ├── validate0_data.json
│ ├── validate0_expected.json
│ └── validate0_schema.json
├── collections
│ ├── acme.broken
│ │ └── galaxy.yml
│ ├── acme.goodies
│ │ ├── galaxy.yml
│ │ ├── molecule
│ │ │ └── default
│ │ │ │ ├── converge.yml
│ │ │ │ └── molecule.yml
│ │ ├── roles
│ │ │ └── baz
│ │ │ │ ├── molecule
│ │ │ │ └── deep_scenario
│ │ │ │ │ ├── converge.yml
│ │ │ │ │ └── molecule.yml
│ │ │ │ └── tasks
│ │ │ │ └── main.yml
│ │ └── tests
│ │ │ └── requirements.yml
│ └── acme.minimal
│ │ └── galaxy.yml
├── conftest.py
├── roles
│ ├── acme.missing_deps
│ │ ├── meta
│ │ │ └── main.yml
│ │ └── requirements.yml
│ ├── acme.sample2
│ │ └── meta
│ │ │ └── main.yml
│ ├── ansible-role-sample
│ │ └── meta
│ │ │ └── main.yml
│ ├── sample3
│ │ └── meta
│ │ │ └── main.yml
│ └── sample4
│ │ └── meta
│ │ └── main.yml
├── test_api.py
├── test_config.py
├── test_configuration_example.py
├── test_loaders.py
├── test_prerun.py
├── test_runtime.py
├── test_runtime_example.py
├── test_runtime_scan_path.py
├── test_schema.py
├── test_types.py
└── test_version.py
├── tools
├── get-version.sh
├── report-coverage
├── smoke.py
└── update-spec.sh
└── tox.ini
/.config/constraints.txt:
--------------------------------------------------------------------------------
1 | # This file was autogenerated by uv via the following command:
2 | # tox run deps
3 | argparse-manpage==4.6 # via ansible-compat (pyproject.toml)
4 | attrs==25.3.0 # via jsonschema, referencing
5 | babel==2.17.0 # via mkdocs-material
6 | backrefs==5.8 # via mkdocs-material
7 | beautifulsoup4==4.13.4 # via linkchecker, mkdocs-htmlproofer-plugin
8 | black==25.1.0 # via ansible-compat (pyproject.toml)
9 | cairocffi==1.7.1 # via cairosvg
10 | cairosvg==2.7.1 # via mkdocs-ansible
11 | certifi==2025.4.26 # via requests
12 | cffi==1.17.1 # via cairocffi, cryptography
13 | charset-normalizer==3.4.2 # via requests
14 | click==8.2.0 # via black, mkdocs
15 | colorama==0.4.6 # via griffe, mkdocs-material
16 | coverage==7.8.0 # via ansible-compat (pyproject.toml)
17 | cryptography==45.0.2 # via ansible-core
18 | csscompressor==0.9.5 # via mkdocs-minify-plugin
19 | cssselect2==0.8.0 # via cairosvg
20 | defusedxml==0.7.1 # via cairosvg
21 | dnspython==2.7.0 # via linkchecker
22 | exceptiongroup==1.3.0 # via pytest
23 | ghp-import==2.1.0 # via mkdocs
24 | griffe==1.7.3 # via mkdocstrings-python
25 | hjson==3.1.0 # via mkdocs-macros-plugin, super-collections
26 | htmlmin2==0.1.13 # via mkdocs-minify-plugin
27 | idna==3.10 # via requests
28 | iniconfig==2.1.0 # via pytest
29 | jinja2==3.1.6 # via ansible-core, mkdocs, mkdocs-macros-plugin, mkdocs-material, mkdocstrings
30 | jsmin==3.0.1 # via mkdocs-minify-plugin
31 | jsonschema==4.23.0 # via ansible-compat (pyproject.toml)
32 | jsonschema-specifications==2025.4.1 # via jsonschema
33 | linkchecker==10.5.0 # via mkdocs-ansible
34 | markdown==3.8 # via markdown-include, mkdocs, mkdocs-autorefs, mkdocs-htmlproofer-plugin, mkdocs-material, mkdocstrings, pymdown-extensions
35 | markdown-exec==1.10.3 # via mkdocs-ansible
36 | markdown-include==0.8.1 # via mkdocs-ansible
37 | markupsafe==3.0.2 # via jinja2, mkdocs, mkdocs-autorefs, mkdocstrings
38 | mergedeep==1.3.4 # via mkdocs, mkdocs-get-deps
39 | mkdocs==1.6.1 # via mkdocs-ansible, mkdocs-autorefs, mkdocs-gen-files, mkdocs-htmlproofer-plugin, mkdocs-macros-plugin, mkdocs-material, mkdocs-minify-plugin, mkdocstrings
40 | mkdocs-ansible==25.5.0 # via ansible-compat (pyproject.toml)
41 | mkdocs-autorefs==1.4.1 # via mkdocstrings, mkdocstrings-python
42 | mkdocs-gen-files==0.5.0 # via mkdocs-ansible
43 | mkdocs-get-deps==0.2.0 # via mkdocs
44 | mkdocs-htmlproofer-plugin==1.3.0 # via mkdocs-ansible
45 | mkdocs-macros-plugin==1.3.7 # via mkdocs-ansible
46 | mkdocs-material==9.6.14 # via mkdocs-ansible
47 | mkdocs-material-extensions==1.3.1 # via mkdocs-ansible, mkdocs-material
48 | mkdocs-minify-plugin==0.8.0 # via mkdocs-ansible
49 | mkdocstrings==0.29.1 # via mkdocs-ansible, mkdocstrings-python
50 | mkdocstrings-python==1.16.10 # via mkdocs-ansible
51 | mypy-extensions==1.1.0 # via black
52 | packaging==25.0 # via ansible-core, black, mkdocs, mkdocs-macros-plugin, pytest, ansible-compat (pyproject.toml)
53 | paginate==0.5.7 # via mkdocs-material
54 | pathspec==0.12.1 # via black, mkdocs, mkdocs-macros-plugin
55 | pillow==11.2.1 # via cairosvg, mkdocs-ansible
56 | platformdirs==4.3.8 # via black, mkdocs-get-deps
57 | pluggy==1.6.0 # via pytest
58 | pycparser==2.22 # via cffi
59 | pygments==2.19.1 # via mkdocs-material
60 | pymdown-extensions==10.15 # via markdown-exec, mkdocs-ansible, mkdocs-material, mkdocstrings
61 | pytest==8.3.5 # via pytest-instafail, pytest-mock, pytest-plus, ansible-compat (pyproject.toml)
62 | pytest-instafail==0.5.0 # via ansible-compat (pyproject.toml)
63 | pytest-mock==3.14.0 # via ansible-compat (pyproject.toml)
64 | pytest-plus==0.8.1 # via ansible-compat (pyproject.toml)
65 | python-dateutil==2.9.0.post0 # via ghp-import, mkdocs-macros-plugin
66 | pyyaml==6.0.2 # via ansible-core, mkdocs, mkdocs-get-deps, mkdocs-macros-plugin, pymdown-extensions, pyyaml-env-tag, ansible-compat (pyproject.toml)
67 | pyyaml-env-tag==1.1 # via mkdocs
68 | referencing==0.36.2 # via jsonschema, jsonschema-specifications
69 | requests==2.32.3 # via linkchecker, mkdocs-htmlproofer-plugin, mkdocs-material
70 | rpds-py==0.25.0 # via jsonschema, referencing
71 | six==1.17.0 # via python-dateutil
72 | soupsieve==2.7 # via beautifulsoup4
73 | subprocess-tee==0.4.2 # via ansible-compat (pyproject.toml)
74 | super-collections==0.5.3 # via mkdocs-macros-plugin
75 | termcolor==3.1.0 # via mkdocs-macros-plugin
76 | tinycss2==1.4.0 # via cairosvg, cssselect2
77 | tomli==2.2.1 # via argparse-manpage, black, pytest
78 | urllib3==2.4.0 # via requests
79 | watchdog==6.0.0 # via mkdocs
80 | webencodings==0.5.1 # via cssselect2, tinycss2
81 |
82 | # The following packages were excluded from the output:
83 | # ansible-core
84 | # pip
85 | # resolvelib
86 | # typing-extensions
87 | # uv
88 |
--------------------------------------------------------------------------------
/.config/python3-ansible-compat.spec:
--------------------------------------------------------------------------------
1 | # spell-checker:ignore bcond pkgversion buildrequires autosetup PYTHONPATH noarch buildroot bindir sitelib numprocesses clib
2 | # All tests require Internet access
3 | # to test in mock use: --enable-network --with check
4 | # to test in a privileged environment use:
5 | # --with check --with privileged_tests
6 | %bcond_with check
7 | %bcond_with privileged_tests
8 |
9 | Name: ansible-compat
10 | Version: VERSION_PLACEHOLDER
11 | Release: 1%{?dist}
12 | Summary: Ansible-compat library
13 |
14 | License: GPL-3.0-or-later
15 | URL: https://github.com/ansible/ansible-compat
16 | Source0: %{pypi_source}
17 |
18 | BuildArch: noarch
19 |
20 | BuildRequires: python%{python3_pkgversion}-devel
21 | %if %{with check}
22 | # These are required for tests:
23 | BuildRequires: python%{python3_pkgversion}-pytest
24 | BuildRequires: python%{python3_pkgversion}-pytest-xdist
25 | BuildRequires: python%{python3_pkgversion}-libselinux
26 | BuildRequires: git-core
27 | %endif
28 | Requires: git-core
29 |
30 |
31 | %description
32 | Ansible-compat.
33 |
34 | %prep
35 | %autosetup
36 |
37 |
38 | %generate_buildrequires
39 | %pyproject_buildrequires
40 |
41 |
42 | %build
43 | %pyproject_wheel
44 |
45 |
46 | %install
47 | %pyproject_install
48 | %pyproject_save_files ansible_compat
49 |
50 |
51 | %check
52 | %pyproject_check_import
53 | %if %{with check}
54 | %pytest \
55 | -v \
56 | --disable-pytest-warnings \
57 | --numprocesses=auto \
58 | test
59 | %endif
60 |
61 |
62 | %files -f %{pyproject_files}
63 | %license LICENSE
64 | %doc docs/ README.md
65 |
66 | %changelog
67 |
--------------------------------------------------------------------------------
/.config/requirements-docs.in:
--------------------------------------------------------------------------------
1 | argparse-manpage
2 | black
3 | mkdocs-ansible>=24.3.1
4 |
--------------------------------------------------------------------------------
/.config/requirements-test.in:
--------------------------------------------------------------------------------
1 | coverage
2 | pip
3 | pytest-instafail
4 | pytest-mock
5 | pytest-plus>=0.6.1
6 | pytest>=7.2.0
7 | uv>=0.4.30
8 |
--------------------------------------------------------------------------------
/.config/requirements.in:
--------------------------------------------------------------------------------
1 | # https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html
2 | ansible-core>=2.16
3 | packaging
4 | PyYAML
5 | subprocess-tee>=0.4.1
6 | jsonschema>=4.6.0
7 |
--------------------------------------------------------------------------------
/.git_archival.txt:
--------------------------------------------------------------------------------
1 | node: af6d5a8fed201502be3a4574072a4ebba857e015
2 | node-date: 2025-05-19T06:24:03Z
3 | describe-name: v25.5.0-3-gaf6d5a8f
4 | ref-names: HEAD -> main
5 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Force LF line endings for text files
2 | * text=auto eol=lf
3 |
4 | *.png binary
5 |
6 | # Needed for setuptools-scm-git-archive
7 | .git_archival.txt export-subst
8 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @ansible/devtools
2 |
--------------------------------------------------------------------------------
/.github/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Community Code of Conduct
2 |
3 | Please see the official
4 | [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
5 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: >
4 | Create a bug report. Ensure that it does reproduce on the main branch with
5 | python >=3.10. For anything else, please use the discussion link below.
6 | labels: bug, new
7 | ---
8 |
9 |
10 |
11 |
12 | ##### Summary
13 |
14 |
15 |
16 | ##### Issue Type
17 |
18 | - Bug Report
19 |
20 | ##### OS / ENVIRONMENT
21 |
22 |
23 |
24 | ```console (paste below)
25 | python -c "import ansible_compat; print(ansible_compat.__version__)"
26 | ```
27 |
28 |
30 |
31 | - ansible installation method: one of source, pip, OS package
32 | - ansible-compat installation method: one of source, pip, OS package
33 |
34 | ##### STEPS TO REPRODUCE
35 |
36 |
37 |
38 |
39 |
40 | ```console (paste below)
41 |
42 | ```
43 |
44 |
45 |
46 | ##### Desired Behavior
47 |
48 |
49 |
50 | Possible security bugs should be reported via email to `security@ansible.com`
51 |
52 | ##### Actual Behavior
53 |
54 |
55 |
56 | Please give some details of what is happening. Include a [minimum complete
57 | verifiable example].
58 |
59 |
60 |
61 | ```paste below
62 |
63 | ```
64 |
65 | [minimum complete verifiable example]: https://stackoverflow.com/help/mcve
66 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | ---
2 | version: 2
3 | updates:
4 | - package-ecosystem: pip
5 | directory: /.config/
6 | schedule:
7 | day: sunday
8 | interval: weekly
9 | labels:
10 | - dependabot-deps-updates
11 | - skip-changelog
12 | groups:
13 | dependencies:
14 | patterns:
15 | - "*"
16 | - package-ecosystem: "github-actions"
17 | directory: "/"
18 | schedule:
19 | interval: daily
20 | labels:
21 | - "dependencies"
22 | - "skip-changelog"
23 |
--------------------------------------------------------------------------------
/.github/release-drafter.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # see https://github.com/ansible/team-devtools
3 | _extends: ansible/team-devtools
4 |
--------------------------------------------------------------------------------
/.github/workflows/ack.yml:
--------------------------------------------------------------------------------
1 | # See https://github.com/ansible/team-devtools/blob/main/.github/workflows/ack.yml
2 | name: ack
3 | on:
4 | pull_request_target:
5 | types: [opened, labeled, unlabeled, synchronize]
6 |
7 | jobs:
8 | ack:
9 | uses: ansible/team-devtools/.github/workflows/ack.yml@main
10 | secrets: inherit
11 |
--------------------------------------------------------------------------------
/.github/workflows/push.yml:
--------------------------------------------------------------------------------
1 | # See https://github.com/ansible/team-devtools/blob/main/.github/workflows/push.yml
2 | name: push
3 | on:
4 | push:
5 | branches:
6 | - main
7 | - "releases/**"
8 | - "stable/**"
9 |
10 | jobs:
11 | ack:
12 | uses: ansible/team-devtools/.github/workflows/push.yml@main
13 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: release
2 |
3 | on:
4 | release:
5 | types: [published]
6 |
7 | jobs:
8 | release:
9 | name: release ${{ github.event.ref }}
10 | # unable to use environment with uses/with, basically cannot reuse release pipelines
11 | environment: release
12 | runs-on: ubuntu-24.04
13 | permissions:
14 | id-token: write
15 |
16 | env:
17 | FORCE_COLOR: 1
18 | PY_COLORS: 1
19 | TOX_PARALLEL_NO_SPINNER: 1
20 |
21 | steps:
22 | - name: Switch to using Python 3.12 by default
23 | uses: actions/setup-python@v5
24 | with:
25 | python-version: "3.12"
26 | - name: Install tox
27 | run: python3 -m pip install --user "tox>=4.0.0"
28 | - name: Check out src from Git
29 | uses: actions/checkout@v4
30 | with:
31 | fetch-depth: 0 # needed by setuptools-scm
32 | - name: Build dists
33 | run: python3 -m tox -e pkg
34 | - name: Publish to pypi.org
35 | uses: pypa/gh-action-pypi-publish@unstable/v1
36 |
--------------------------------------------------------------------------------
/.github/workflows/tox.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: tox
3 |
4 | on:
5 | merge_group:
6 | branches:
7 | - "main"
8 | push:
9 | branches:
10 | - "main"
11 | - "releases/**"
12 | - "stable/**"
13 | pull_request:
14 | branches:
15 | - "main"
16 | workflow_call:
17 |
18 | concurrency:
19 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
20 | cancel-in-progress: true
21 |
22 | jobs:
23 | tox:
24 | uses: ansible/team-devtools/.github/workflows/tox.yml@main
25 | with:
26 | jobs_producing_coverage: 9
27 | other_names: |
28 | docs
29 | lint
30 | pkg
31 | py310-ansible217
32 | py312-ansible216
33 | py312-ansible217
34 | py312-ansible218
35 | py312-devel
36 | py313-ansible218
37 | py313-devel
38 | py310-macos:tox -e py310
39 | py313-macos:tox -e py313
40 | smoke
41 | skip_explode: "1"
42 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | /*.spec
35 | rpm/*.spec
36 | *.rpm
37 |
38 | # Installer logs
39 | pip-log.txt
40 | pip-delete-this-directory.txt
41 |
42 | # Unit test / coverage reports
43 | htmlcov/
44 | .tox/
45 | .nox/
46 | .coverage
47 | .cache
48 | junit.xml
49 | nosetests.xml
50 | coverage.xml
51 | *.cover
52 | *.py,cover
53 | .hypothesis/
54 | .pytest_cache/
55 |
56 | # Translations
57 | *.mo
58 | *.pot
59 |
60 | # Django stuff:
61 | *.log
62 | local_settings.py
63 | db.sqlite3
64 | db.sqlite3-journal
65 |
66 | # Flask stuff:
67 | instance/
68 | .webassets-cache
69 |
70 | # Scrapy stuff:
71 | .scrapy
72 |
73 | # Sphinx documentation
74 | docs/_build/
75 |
76 | # PyBuilder
77 | target/
78 |
79 | # Jupyter Notebook
80 | .ipynb_checkpoints
81 |
82 | # IPython
83 | profile_default/
84 | ipython_config.py
85 |
86 | # pyenv
87 | .python-version
88 |
89 | # pipenv
90 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
91 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
92 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
93 | # install all needed dependencies.
94 | #Pipfile.lock
95 |
96 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
97 | __pypackages__/
98 |
99 | # Celery stuff
100 | celerybeat-schedule
101 | celerybeat.pid
102 |
103 | # SageMath parsed files
104 | *.sage.py
105 |
106 | # Environments
107 | .env
108 | .venv
109 | env/
110 | venv/
111 | ENV/
112 | env.bak/
113 | venv.bak/
114 |
115 | # Spyder project settings
116 | .spyderproject
117 | .spyproject
118 |
119 | # Rope project settings
120 | .ropeproject
121 |
122 | # mkdocs documentation
123 | /site
124 |
125 | # mypy
126 | .mypy_cache/
127 | .dmypy.json
128 | dmypy.json
129 |
130 | # Pyre type checker
131 | .pyre/
132 | .test-results
133 | *.lcov
134 | ansible_collections
135 |
136 | # Generated by setuptools-scm
137 | src/ansible_compat/_version.py
138 | node_modules
139 | _readthedocs
140 | test/roles/acme.missing_deps/.ansible
141 | .ansible
142 |
--------------------------------------------------------------------------------
/.packit.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | # https://packit.dev/docs/configuration/
3 | # Test locally by running: packit build locally
4 | # spell-checker:ignore packit specfile copr epel
5 | specfile_path: dist/python3-ansible-compat.spec
6 | actions:
7 | create-archive:
8 | - sh -c "rm dist/*.tar.gz || true"
9 | - python3 -m build --sdist --outdir dist
10 | - sh -c "ls dist/ansible_compat-*.tar.gz"
11 | get-current-version:
12 | - ./tools/get-version.sh
13 | post-upstream-clone:
14 | - ./tools/update-spec.sh
15 | srpm_build_deps:
16 | - python3-build
17 | - python3-setuptools_scm
18 | - python3-pytest
19 | - python3-pytest-mock
20 | jobs:
21 | - job: copr_build
22 | trigger: pull_request
23 | branch: main
24 | require:
25 | label:
26 | present:
27 | - bug
28 | - dependencies
29 | - enhancement
30 | - major
31 | - minor
32 | absent:
33 | - skip-changelog
34 | targets:
35 | - fedora-rawhide-x86_64
36 | - fedora-rawhide-aarch64
37 | - fedora-latest-x86_64
38 | - fedora-latest-aarch64
39 | # Missing python3-build see https://bugzilla.redhat.com/show_bug.cgi?id=2129071
40 | # - centos-stream-9-aarch64
41 | # - centos-stream-9-x86_64
42 | - job: tests
43 | trigger: pull_request
44 | branch: main
45 | require:
46 | label:
47 | present:
48 | - bug
49 | - dependencies
50 | - enhancement
51 | - major
52 | - minor
53 | absent:
54 | - skip-changelog
55 | targets:
56 | - fedora-latest
57 | - fedora-rawhide
58 | # - job: propose_downstream
59 | # trigger: release
60 | # metadata:
61 | # dist-git-branch: master
62 | notifications:
63 | pull_request:
64 | successful_build: false
65 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | ci:
3 | # format compatible with commitlint
4 | autoupdate_commit_msg: "chore: pre-commit autoupdate"
5 | autoupdate_schedule: monthly
6 | autofix_commit_msg: "chore: auto fixes from pre-commit.com hooks"
7 | skip:
8 | # https://github.com/pre-commit-ci/issues/issues/55
9 | - ccv
10 | - pip-compile
11 | default_language_version:
12 | # Needed in order to make pip-compile output predictable.
13 | python: python3.10
14 | exclude: |
15 | (?x)^(
16 | test/assets/.*
17 | )$
18 | repos:
19 | - repo: https://github.com/astral-sh/ruff-pre-commit
20 | rev: "v0.11.8"
21 | hooks:
22 | - id: ruff
23 | args: [--fix, --exit-non-zero-on-fix]
24 | # https://github.com/pappasam/toml-sort/issues/69
25 | # - repo: https://github.com/pappasam/toml-sort
26 | # rev: v0.23.1
27 | # hooks:
28 | # - id: toml-sort-fix
29 | - repo: https://github.com/rbubley/mirrors-prettier
30 | # keep it before yamllint
31 | rev: "v3.5.3"
32 | hooks:
33 | - id: prettier
34 | additional_dependencies:
35 | - prettier
36 | - prettier-plugin-toml
37 | - prettier-plugin-sort-json
38 | - repo: https://github.com/pre-commit/pre-commit-hooks.git
39 | rev: v5.0.0
40 | hooks:
41 | - id: end-of-file-fixer
42 | - id: trailing-whitespace
43 | exclude: >
44 | (?x)^(
45 | examples/playbooks/(with-skip-tag-id|unicode).yml|
46 | examples/playbooks/example.yml
47 | )$
48 | - id: mixed-line-ending
49 | - id: fix-byte-order-marker
50 | - id: check-executables-have-shebangs
51 | - id: check-merge-conflict
52 | - id: debug-statements
53 | language_version: python3
54 | - repo: https://github.com/codespell-project/codespell
55 | rev: v2.4.1
56 | hooks:
57 | - id: codespell
58 | - repo: https://github.com/jsh9/pydoclint
59 | rev: 0.6.6
60 | hooks:
61 | - id: pydoclint
62 | # This allows automatic reduction of the baseline file when needed.
63 | entry: sh -ec "pydoclint . && pydoclint --generate-baseline=1 ."
64 | pass_filenames: false
65 | - repo: https://github.com/adrienverge/yamllint.git
66 | rev: v1.37.1
67 | hooks:
68 | - id: yamllint
69 | files: \.(yaml|yml)$
70 | types: [file, yaml]
71 | entry: yamllint --strict
72 | - repo: https://github.com/pappasam/toml-sort
73 | rev: v0.24.2
74 | hooks:
75 | - id: toml-sort-fix
76 | - repo: https://github.com/psf/black
77 | rev: 25.1.0
78 | hooks:
79 | - id: black
80 | language_version: python3
81 | - repo: https://github.com/pre-commit/mirrors-mypy
82 | rev: v1.15.0
83 | hooks:
84 | - id: mypy
85 | # empty args needed in order to match mypy cli behavior
86 | args: ["--strict"]
87 | additional_dependencies:
88 | - ansible-core>=2.16.0
89 | - cached_property
90 | - packaging
91 | - pytest
92 | - pytest-mock
93 | - subprocess-tee>=0.4.1
94 | - "typing-extensions>=4.5.0;python_version<'3.10'"
95 | - types-PyYAML
96 | - types-setuptools
97 | - types-jsonschema>=4.4.9
98 | - repo: https://github.com/pycqa/pylint
99 | rev: v3.3.7
100 | hooks:
101 | - id: pylint
102 | additional_dependencies:
103 | - PyYAML
104 | - pytest
105 | - typing_extensions
106 | # Keep last due to being considerably slower than the others:
107 | - repo: local
108 | hooks:
109 | - id: pip-compile-upgrade
110 | # To run it execute: `pre-commit run pip-compile-upgrade --hook-stage manual`
111 | name: Upgrade constraints files and requirements
112 | files: ^(pyproject\.toml|requirements\.txt)$
113 | language: python
114 | entry: python3 -m uv pip compile -q --all-extras --output-file=.config/constraints.txt pyproject.toml --upgrade
115 | pass_filenames: false
116 | stages:
117 | - manual
118 | additional_dependencies:
119 | - uv>=0.4.3
120 | - id: pip-compile
121 | name: Check constraints files and requirements
122 | files: ^(pyproject\.toml|requirements\.txt)$
123 | language: python
124 | entry: python3 -m uv pip compile -q --all-extras --output-file=.config/constraints.txt pyproject.toml
125 | pass_filenames: false
126 | additional_dependencies:
127 | - uv>=0.4.3
128 | - repo: https://github.com/mashi/codecov-validator
129 | rev: "1.0.1"
130 | hooks:
131 | - id: ccv
132 | name: codecov
133 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | test/assets/
2 |
3 | # Generated by setuptools-scm
4 | src/ansible_compat/_version.py
5 |
--------------------------------------------------------------------------------
/.prettierrc.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | proseWrap: always
3 | jsonRecursiveSort: true # prettier-plugin-sort-json
4 | tabWidth: 2
5 | useTabs: false
6 | overrides:
7 | - files:
8 | - "*.md"
9 | options:
10 | # compatibility with markdownlint
11 | proseWrap: always
12 | printWidth: 80
13 | - files:
14 | - "*.yaml"
15 | - "*.yml"
16 | options:
17 | # compatibility with yamllint
18 | proseWrap: preserve
19 |
--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
1 | 3.10
2 |
3 | # Needed by dependabot, see https://github.com/dependabot/dependabot-core/issues/1455
4 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | ---
2 | version: 2
3 |
4 | mkdocs:
5 | fail_on_warning: true
6 | configuration: mkdocs.yml
7 |
8 | build:
9 | os: ubuntu-24.04
10 | tools:
11 | python: "3.11"
12 | commands:
13 | - pip install --user tox
14 | - python3 -m tox -e docs
15 | python:
16 | install:
17 | - method: pip
18 | path: tox
19 | - method: pip
20 | path: .
21 | extra_requirements:
22 | - docs
23 | submodules:
24 | include: all
25 | recursive: true
26 |
--------------------------------------------------------------------------------
/.sonarcloud.properties:
--------------------------------------------------------------------------------
1 | sonar.python.version=3.10, 3.11, 3.12, 3.13
2 | sonar.sources=src/
3 | sonar.tests=test/
4 |
--------------------------------------------------------------------------------
/.taplo.toml:
--------------------------------------------------------------------------------
1 | [formatting]
2 | # compatibility between toml-sort-fix pre-commit hook and panekj.even-betterer-toml extension
3 | align_comments = false
4 | array_trailing_comma = false
5 | compact_arrays = true
6 | compact_entries = false
7 | compact_inline_tables = true
8 | inline_table_expand = false
9 | reorder_keys = true
10 |
--------------------------------------------------------------------------------
/.vscode/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 | "recommendations": [
3 | "charliermarsh.ruff",
4 | "esbenp.prettier-vscode",
5 | "gruntfuggly.triggertaskonsave",
6 | "markis.code-coverage",
7 | "ms-python.black-formatter",
8 | "ms-python.debugpy",
9 | "ms-python.mypy-type-checker",
10 | "ms-python.pylint",
11 | "ms-python.python",
12 | "sonarsource.sonarlint-vscode",
13 | "streetsidesoftware.code-spell-checker"
14 | ]
15 | }
16 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "[json]": {
3 | "editor.defaultFormatter": "esbenp.prettier-vscode"
4 | },
5 | "[jsonc]": {
6 | "editor.defaultFormatter": "esbenp.prettier-vscode"
7 | },
8 | "[markdown]": {
9 | "editor.defaultFormatter": "esbenp.prettier-vscode"
10 | },
11 | "[python]": {
12 | "editor.codeActionsOnSave": {
13 | "source.fixAll": "explicit",
14 | "source.organizeImports": "explicit"
15 | },
16 | "editor.defaultFormatter": "ms-python.black-formatter",
17 | "editor.formatOnSave": true
18 | },
19 | "[toml]": {
20 | "editor.defaultFormatter": "panekj.even-betterer-toml"
21 | },
22 | "editor.formatOnSave": true,
23 | "evenBetterToml.formatter.alignComments": false,
24 | "evenBetterToml.formatter.arrayTrailingComma": true,
25 | "files.exclude": {
26 | "*.egg-info": true,
27 | ".pytest_cache": true,
28 | ".tox": true,
29 | "__pycache__": true,
30 | "build": true
31 | },
32 | "git.ignoreLimitWarning": true,
33 | "grammarly.domain": "technical",
34 | "grammarly.files.include": ["**/*.txt", "**/*.md"],
35 | "grammarly.hideUnavailablePremiumAlerts": true,
36 | "grammarly.showExamples": true,
37 | "markiscodecoverage.searchCriteria": "coverage.lcov",
38 | "mypy-type-checker.importStrategy": "fromEnvironment",
39 | "mypy-type-checker.preferDaemon": true,
40 | "mypy-type-checker.reportingScope": "workspace",
41 | "python.analysis.exclude": ["build"],
42 | "python.terminal.activateEnvironment": true,
43 | "python.testing.pytestArgs": ["tests"],
44 | "python.testing.pytestEnabled": true,
45 | "python.testing.unittestEnabled": false,
46 | "sortLines.filterBlankLines": true,
47 | "yaml.completion": true,
48 | "yaml.customTags": ["!encrypted/pkcs1-oaep scalar", "!vault scalar"],
49 | "yaml.format.enable": false,
50 | "yaml.validate": true
51 | }
52 |
--------------------------------------------------------------------------------
/.yamllint:
--------------------------------------------------------------------------------
1 | rules:
2 | document-start: disable
3 | indentation:
4 | level: error
5 | indent-sequences: consistent
6 | ignore: |
7 | .tox
8 | # ignore added because this file includes on-purpose errors
9 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Community managed Ansible repositories
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ansible-compat
2 |
3 | [](https://pypi.org/project/ansible-compat/)
4 | [](https://ansible.readthedocs.io/projects/compat/)
5 | [](https://github.com/ansible/ansible-compat/actions/workflows/tox.yml)
6 | [](https://codecov.io/github/ansible/ansible-compat?branch=main)
7 |
8 | A Python package containing functions that facilitate working with various
9 | versions of Ansible.
10 |
11 | ## Documentation
12 |
13 | Documentation is available at
14 | [ansible.readthedocs.io/projects/compat/](https://ansible.readthedocs.io/projects/compat/).
15 |
16 | ## Communication
17 |
18 | Join the Ansible forum to ask questions, get help, and interact with the
19 | community.
20 |
21 | - [Get Help](https://forum.ansible.com/c/help/6): get help or help others.
22 | Please add appropriate tags if you start new discussions.
23 | - [Social Spaces](https://forum.ansible.com/c/chat/4): meet and interact with
24 | fellow enthusiasts.
25 | - [News & Announcements](https://forum.ansible.com/c/news/5): track project-wide
26 | announcements including social events.
27 |
28 | To get release announcements and important changes from the community, see the
29 | [Bullhorn newsletter](https://docs.ansible.com/ansible/devel/community/communication.html#the-bullhorn).
30 |
31 | For more information about getting in touch, see the
32 | [Ansible communication guide](https://docs.ansible.com/ansible/devel/community/communication.html).
33 |
34 | ## Code of Conduct
35 |
36 | Please see the official
37 | [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
38 |
--------------------------------------------------------------------------------
/ansible.cfg:
--------------------------------------------------------------------------------
1 | [defaults]
2 | # isolate testing of ansible-compat from user local setup
3 | collections_path = .
4 |
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | codecov:
2 | require_ci_to_pass: true
3 | comment: false
4 | coverage:
5 | status:
6 | patch: true # we want github annotations
7 |
--------------------------------------------------------------------------------
/docs/api.md:
--------------------------------------------------------------------------------
1 | # API
2 |
3 | ::: ansible_compat.config
4 |
5 | ::: ansible_compat.errors
6 |
7 | ::: ansible_compat.loaders
8 |
9 | ::: ansible_compat.prerun
10 |
11 | ::: ansible_compat.runtime
12 |
13 | ::: ansible_compat.schema
14 |
--------------------------------------------------------------------------------
/docs/images/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ansible/ansible-compat/af6d5a8fed201502be3a4574072a4ebba857e015/docs/images/favicon.ico
--------------------------------------------------------------------------------
/docs/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ansible/ansible-compat/af6d5a8fed201502be3a4574072a4ebba857e015/docs/images/logo.png
--------------------------------------------------------------------------------
/docs/images/logo.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
8 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | # Examples
2 |
3 | ## Using Ansible runtime
4 |
5 | ```python title="example.py"
6 | {!../test/test_runtime_example.py!}
7 | ```
8 |
9 | ## Access to Ansible configuration
10 |
11 | As you may not want to parse `ansible-config dump` yourself, you can make use of
12 | a simple python class that facilitates access to it, using python data types.
13 |
14 | ```python
15 | {!../test/test_configuration_example.py!}
16 | ```
17 |
--------------------------------------------------------------------------------
/examples/reqs_broken/requirements.yml:
--------------------------------------------------------------------------------
1 | roles: []
2 | collections: []
3 | integration_tests_dependencies: [] # <-- invalid key
4 | unit_tests_dependencies: [] # <-- invalid key
5 |
--------------------------------------------------------------------------------
/examples/reqs_v1/requirements.yml:
--------------------------------------------------------------------------------
1 | # v1 requirements test file
2 | # ansible-galaxy role install -r requirements.yml -p roles
3 | - src: git+https://github.com/geerlingguy/ansible-role-docker.git
4 | name: geerlingguy.mysql
5 |
--------------------------------------------------------------------------------
/examples/reqs_v2/community-molecule-0.1.0.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ansible/ansible-compat/af6d5a8fed201502be3a4574072a4ebba857e015/examples/reqs_v2/community-molecule-0.1.0.tar.gz
--------------------------------------------------------------------------------
/examples/reqs_v2/requirements.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # For local install run:
3 | # ansible-galaxy role install -r requirements.yml -p roles
4 | # ansible-galaxy collection install -r requirements.yml -p collections
5 | #
6 | # Current test file avoids using galaxy server on purpose, for resiliency
7 | roles:
8 | - src: git+https://github.com/geerlingguy/ansible-role-docker.git
9 | name: geerlingguy.mysql
10 | collections:
11 | - name: community-molecule-0.1.0.tar.gz
12 | # Also needed for testing purposes as this should trigger addition of --pre
13 | # argument as this is required due to
14 | # https://github.com/ansible/ansible-lint/issues/3686
15 | # https://github.com/ansible/ansible/issues/79109
16 | - name: https://github.com/ansible-collections/amazon.aws.git
17 | type: git
18 | version: main
19 | - name: https://github.com/ansible-collections/community.aws.git
20 | type: git
21 | version: main
22 |
--------------------------------------------------------------------------------
/mise.toml:
--------------------------------------------------------------------------------
1 | [settings]
2 | idiomatic_version_file_disable_tools = ["python"]
3 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | ---
2 | site_name: Ansible Compat Library
3 | site_url: https://ansible.readthedocs.io/projects/compat/
4 | repo_url: https://github.com/ansible/ansible-compat
5 | edit_uri: blob/main/docs/
6 | copyright: Copyright © 2023 Red Hat, Inc.
7 | docs_dir: docs
8 | # strict: true
9 | watch:
10 | - mkdocs.yml
11 | - src
12 | - docs
13 |
14 | theme:
15 | name: ansible
16 | features:
17 | - content.code.copy
18 | - content.action.edit
19 | - navigation.expand
20 | - navigation.sections
21 | - navigation.instant
22 | - navigation.indexes
23 | - navigation.tracking
24 | - toc.integrate
25 | extra:
26 | social:
27 | - icon: fontawesome/brands/github-alt
28 | link: https://github.com/ansible/ansible-compat
29 | name: GitHub
30 | - icon: fontawesome/brands/discourse
31 | link: https://forum.ansible.com/c/project/7
32 | name: Ansible forum
33 | nav:
34 | - examples: index.md
35 | - api: api.md
36 | plugins:
37 | - autorefs
38 | - search
39 | - material/social
40 | - material/tags
41 | - mkdocstrings:
42 | handlers:
43 | python:
44 | inventories:
45 | - https://docs.python.org/3/objects.inv
46 | options:
47 | # heading_level: 2
48 | docstring_style: google
49 | docstring_options:
50 | ignore_init_summary: yes
51 |
52 | show_submodules: no
53 | docstring_section_style: list
54 | members_order: alphabetical
55 | show_category_heading: no
56 | # cannot merge init into class due to parse error...
57 | # merge_init_into_class: yes
58 | # separate_signature: yes
59 | show_root_heading: yes
60 | show_signature_annotations: yes
61 | separate_signature: yes
62 | # show_bases: false
63 | # options:
64 | # show_root_heading: true
65 | # docstring_style: sphinx
66 |
67 | markdown_extensions:
68 | - markdown_include.include:
69 | base_path: docs
70 | - admonition
71 | - def_list
72 | - footnotes
73 | - pymdownx.highlight:
74 | anchor_linenums: true
75 | - pymdownx.inlinehilite
76 | - pymdownx.superfences
77 | - pymdownx.magiclink:
78 | repo_url_shortener: true
79 | repo_url_shorthand: true
80 | social_url_shorthand: true
81 | social_url_shortener: true
82 | user: facelessuser
83 | repo: pymdown-extensions
84 | normalize_issue_symbols: true
85 | - pymdownx.tabbed:
86 | alternate_style: true
87 | - toc:
88 | toc_depth: 2
89 | permalink: true
90 | - pymdownx.superfences:
91 | custom_fences:
92 | - name: mermaid
93 | class: mermaid
94 | format: ""
95 |
--------------------------------------------------------------------------------
/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "ansible-compat",
3 | "lockfileVersion": 3,
4 | "requires": true,
5 | "packages": {
6 | "": {
7 | "devDependencies": {
8 | "prettier": "^3.4.2"
9 | }
10 | },
11 | "node_modules/prettier": {
12 | "version": "3.4.2",
13 | "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.4.2.tgz",
14 | "integrity": "sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ==",
15 | "dev": true,
16 | "license": "MIT",
17 | "bin": {
18 | "prettier": "bin/prettier.cjs"
19 | },
20 | "engines": {
21 | "node": ">=14"
22 | },
23 | "funding": {
24 | "url": "https://github.com/prettier/prettier?sponsor=1"
25 | }
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "devDependencies": {
3 | "prettier": "^3.4.2"
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | build-backend = "setuptools.build_meta"
3 | requires = [
4 | "setuptools >= 65.3.0", # required by pyproject+setuptools_scm integration and editable installs
5 | "setuptools_scm[toml] >= 7.0.5" # required for "no-local-version" scheme
6 | ]
7 |
8 | [project]
9 | authors = [{"email" = "ssbarnea@redhat.com", "name" = "Sorin Sbarnea"}]
10 | classifiers = [
11 | "Development Status :: 5 - Production/Stable",
12 | "Environment :: Console",
13 | "Intended Audience :: Developers",
14 | "Intended Audience :: Information Technology",
15 | "Intended Audience :: System Administrators",
16 | "License :: OSI Approved :: MIT License",
17 | "Operating System :: OS Independent",
18 | "Programming Language :: Python :: 3",
19 | "Programming Language :: Python :: 3.10",
20 | "Programming Language :: Python :: 3.11",
21 | "Programming Language :: Python :: 3.12",
22 | "Programming Language :: Python :: 3.13",
23 | "Programming Language :: Python",
24 | "Topic :: System :: Systems Administration",
25 | "Topic :: Software Development :: Bug Tracking",
26 | "Topic :: Software Development :: Quality Assurance",
27 | "Topic :: Software Development :: Testing",
28 | "Topic :: Utilities"
29 | ]
30 | description = "Ansible compatibility goodies"
31 | dynamic = ["version", "dependencies", "optional-dependencies"]
32 | keywords = ["ansible"]
33 | license = {text = "MIT"}
34 | maintainers = [{"email" = "ssbarnea@redhat.com", "name" = "Sorin Sbarnea"}]
35 | name = "ansible-compat"
36 | readme = "README.md"
37 | # https://peps.python.org/pep-0621/#readme
38 | requires-python = ">=3.10"
39 |
40 | [project.urls]
41 | changelog = "https://github.com/ansible/ansible-compat/releases"
42 | documentation = "https://ansible.readthedocs.io/projects/compat/"
43 | homepage = "https://github.com/ansible/ansible-compat"
44 | repository = "https://github.com/ansible/ansible-compat"
45 |
46 | [tool.coverage.report]
47 | exclude_also = ["pragma: no cover", "if TYPE_CHECKING:"]
48 | fail_under = 100
49 | show_missing = true
50 | skip_covered = true
51 |
52 | [tool.coverage.run]
53 | # Do not use branch until bug is fixed:
54 | # https://github.com/nedbat/coveragepy/issues/605
55 | branch = false
56 | concurrency = ["multiprocessing", "thread"]
57 | parallel = true
58 | source = ["src"]
59 |
60 | [tool.isort]
61 | profile = "black"
62 |
63 | [tool.mypy]
64 | color_output = true
65 | disallow_any_generics = true
66 | disallow_untyped_calls = true
67 | disallow_untyped_defs = true
68 | error_summary = true
69 | # disallow_any_unimported = True
70 | # ; warn_redundant_casts = True
71 | # warn_return_any = True
72 | # warn_unused_configs = True
73 | exclude = "test/local-content"
74 | python_version = "3.10"
75 |
76 | [[tool.mypy.overrides]]
77 | ignore_missing_imports = true
78 | module = "ansible.*"
79 |
80 | [[tool.mypy.overrides]]
81 | ignore_errors = true
82 | # generated by setuptools-scm, can be missing during linting
83 | module = "ansible_compat._version"
84 |
85 | [tool.pydoclint]
86 | allow-init-docstring = true
87 | arg-type-hints-in-docstring = false
88 | baseline = ".config/pydoclint-baseline.txt"
89 | check-return-types = false
90 | check-yield-types = false
91 | exclude = '\.ansible|\.cache|\.git|\.tox|build|ansible_collections|out|venv'
92 | should-document-private-class-attributes = true
93 | show-filenames-in-every-violation-message = true
94 | skip-checking-short-docstrings = false
95 | style = "google"
96 |
97 | [tool.pylint.BASIC]
98 | good-names = [
99 | "f", # filename
100 | "i",
101 | "j",
102 | "k",
103 | "ns", # namespace
104 | "ex",
105 | "Run",
106 | "_"
107 | ]
108 |
109 | [tool.pylint.IMPORTS]
110 | preferred-modules = ["unittest:pytest"]
111 |
112 | [tool.pylint.MASTER]
113 | # _version.py is generated by setuptools-scm.
114 | ignore-paths = "^src/ansible_compat/_version.py"
115 |
116 | [tool.pylint."MESSAGES CONTROL"]
117 | disable = [
118 | "unknown-option-value",
119 | # https://gist.github.com/cidrblock/ec3412bacfeb34dbc2d334c1d53bef83
120 | "C0103", # invalid-name / ruff N815
121 | "C0105", # typevar-name-incorrect-variance / ruff PLC0105
122 | "C0112", # empty-docstring / ruff D419
123 | "C0113", # unneeded-not / ruff SIM208
124 | "C0114", # missing-module-docstring / ruff D100
125 | "C0115", # missing-class-docstring / ruff D101
126 | "C0116", # missing-function-docstring / ruff D103
127 | "C0121", # singleton-comparison / ruff PLC0121
128 | "C0123", # unidiomatic-typecheck / ruff E721
129 | "C0131", # typevar-double-variance / ruff PLC0131
130 | "C0132", # typevar-name-mismatch / ruff PLC0132
131 | "C0198", # bad-docstring-quotes / ruff Q002
132 | "C0199", # docstring-first-line-empty / ruff D210
133 | "C0201", # consider-iterating-dictionary / ruff SIM118
134 | "C0202", # bad-classmethod-argument / ruff PLC0202
135 | "C0205", # single-string-used-for-slots / ruff PLC0205
136 | "C0208", # use-sequence-for-iteration / ruff PLC0208
137 | "C0301", # line-too-long / ruff E501
138 | "C0303", # trailing-whitespace / ruff W291
139 | "C0304", # missing-final-newline / ruff W292
140 | "C0321", # multiple-statements / ruff PLC0321
141 | "C0410", # multiple-imports / ruff E401
142 | "C0411", # wrong-import-order / ruff I001
143 | "C0412", # ungrouped-imports / ruff I001
144 | "C0413", # wrong-import-position / ruff E402
145 | "C0414", # useless-import-alias / ruff PLC0414
146 | "C0415", # import-outside-toplevel / ruff PLC0415
147 | "C0501", # consider-using-any-or-all / ruff PLC0501
148 | "C1901", # compare-to-empty-string / ruff PLC1901
149 | "C2201", # misplaced-comparison-constant / ruff SIM300
150 | "C2401", # non-ascii-name / ruff PLC2401
151 | "C2403", # non-ascii-module-import / ruff PLC2403
152 | "C2701", # import-private-name / ruff PLC2701
153 | "C2801", # unnecessary-dunder-call / ruff PLC2801
154 | "C3001", # unnecessary-lambda-assignment / ruff E731
155 | "C3002", # unnecessary-direct-lambda-call / ruff PLC3002
156 | "E0001", # syntax-error / ruff E999
157 | "E0100", # init-is-generator / ruff PLE0100
158 | "E0101", # return-in-init / ruff PLE0101
159 | "E0102", # function-redefined / ruff F811
160 | "E0103", # not-in-loop / ruff PLE0103
161 | "E0104", # return-outside-function / ruff F706
162 | "E0105", # yield-outside-function / ruff F704
163 | "E0107", # nonexistent-operator / ruff B002
164 | "E0112", # too-many-star-expressions / ruff F622
165 | "E0115", # nonlocal-and-global / ruff PLE0115
166 | "E0116", # continue-in-finally / ruff PLE0116
167 | "E0117", # nonlocal-without-binding / ruff PLE0117
168 | "E0118", # used-prior-global-declaration / ruff PLE0118
169 | "E0211", # no-method-argument / ruff N805
170 | "E0213", # no-self-argument / ruff N805
171 | "E0237", # assigning-non-slot / ruff PLE0237
172 | "E0241", # duplicate-bases / ruff PLE0241
173 | "E0302", # unexpected-special-method-signature / ruff PLE0302
174 | "E0303", # invalid-length-returned / ruff PLE0303
175 | "E0304", # invalid-bool-returned / ruff PLE0304
176 | "E0305", # invalid-index-returned / ruff PLE0305
177 | "E0308", # invalid-bytes-returned / ruff PLE0308
178 | "E0309", # invalid-hash-returned / ruff PLE0309
179 | "E0402", # relative-beyond-top-level / ruff TID252
180 | "E0602", # undefined-variable / ruff F821
181 | "E0603", # undefined-all-variable / ruff F822
182 | "E0604", # invalid-all-object / ruff PLE0604
183 | "E0605", # invalid-all-format / ruff PLE0605
184 | "E0643", # potential-index-error / ruff PLE0643
185 | "E0704", # misplaced-bare-raise / ruff PLE0704
186 | "E0711", # notimplemented-raised / ruff F901
187 | "E1132", # repeated-keyword / ruff PLE1132
188 | "E1142", # await-outside-async / ruff PLE1142
189 | "E1205", # logging-too-many-args / ruff PLE1205
190 | "E1206", # logging-too-few-args / ruff PLE1206
191 | "E1300", # bad-format-character / ruff PLE1300
192 | "E1301", # truncated-format-string / ruff F501
193 | "E1302", # mixed-format-string / ruff F506
194 | "E1303", # format-needs-mapping / ruff F502
195 | "E1304", # missing-format-string-key / ruff F524
196 | "E1305", # too-many-format-args / ruff F522
197 | "E1306", # too-few-format-args / ruff F524
198 | "E1307", # bad-string-format-type / ruff PLE1307
199 | "E1310", # bad-str-strip-call / ruff PLE1310
200 | "E1519", # singledispatch-method / ruff PLE1519
201 | "E1520", # singledispatchmethod-function / ruff PLE5120
202 | "E1700", # yield-inside-async-function / ruff PLE1700
203 | "E2502", # bidirectional-unicode / ruff PLE2502
204 | "E2510", # invalid-character-backspace / ruff PLE2510
205 | "E2512", # invalid-character-sub / ruff PLE2512
206 | "E2513", # invalid-character-esc / ruff PLE2513
207 | "E2514", # invalid-character-nul / ruff PLE2514
208 | "E2515", # invalid-character-zero-width-space / ruff PLE2515
209 | "E4703", # modified-iterating-set / ruff PLE4703
210 | "R0123", # literal-comparison / ruff F632
211 | "R0124", # comparison-with-itself / ruff PLR0124
212 | "R0133", # comparison-of-constants / ruff PLR0133
213 | "R0202", # no-classmethod-decorator / ruff PLR0202
214 | "R0203", # no-staticmethod-decorator / ruff PLR0203
215 | "R0205", # useless-object-inheritance / ruff UP004
216 | "R0206", # property-with-parameters / ruff PLR0206
217 | "R0904", # too-many-public-methods / ruff PLR0904
218 | "R0911", # too-many-return-statements / ruff PLR0911
219 | "R0912", # too-many-branches / ruff PLR0912
220 | "R0913", # too-many-arguments / ruff PLR0913
221 | "R0914", # too-many-locals / ruff PLR0914
222 | "R0915", # too-many-statements / ruff PLR0915
223 | "R0916", # too-many-boolean-expressions / ruff PLR0916
224 | "R0917", # too-many-positional-arguments / ruff PLR0917
225 | "R1260", # too-complex / ruff C901
226 | "R1701", # consider-merging-isinstance / ruff PLR1701
227 | "R1702", # too-many-nested-blocks / ruff PLR1702
228 | "R1703", # simplifiable-if-statement / ruff SIM108
229 | "R1704", # redefined-argument-from-local / ruff PLR1704
230 | "R1705", # no-else-return / ruff RET505
231 | "R1706", # consider-using-ternary / ruff PLR1706
232 | "R1707", # trailing-comma-tuple / ruff COM818
233 | "R1710", # inconsistent-return-statements / ruff PLR1710
234 | "R1711", # useless-return / ruff PLR1711
235 | "R1714", # consider-using-in / ruff PLR1714
236 | "R1715", # consider-using-get / ruff SIM401
237 | "R1717", # consider-using-dict-comprehension / ruff C402
238 | "R1718", # consider-using-set-comprehension / ruff C401
239 | "R1719", # simplifiable-if-expression / ruff PLR1719
240 | "R1720", # no-else-raise / ruff RET506
241 | "R1721", # unnecessary-comprehension / ruff C416
242 | "R1722", # consider-using-sys-exit / ruff PLR1722
243 | "R1723", # no-else-break / ruff RET508
244 | "R1724", # no-else-continue / ruff RET507
245 | "R1725", # super-with-arguments / ruff UP008
246 | "R1728", # consider-using-generator / ruff C417
247 | "R1729", # use-a-generator / ruff C419
248 | "R1730", # consider-using-min-builtin / ruff PLR1730
249 | "R1731", # consider-using-max-builtin / ruff PLR1730
250 | "R1732", # consider-using-with / ruff SIM115
251 | "R1733", # unnecessary-dict-index-lookup / ruff PLR1733
252 | "R1734", # use-list-literal / ruff C405
253 | "R1735", # use-dict-literal / ruff C406
254 | "R1736", # unnecessary-list-index-lookup / ruff PLR1736
255 | "R2004", # magic-value-comparison / ruff PLR2004
256 | "R2044", # empty-comment / ruff PLR2044
257 | "R5501", # else-if-used / ruff PLR5501
258 | "R6002", # consider-using-alias / ruff UP006
259 | "R6003", # consider-alternative-union-syntax / ruff UP007
260 | "R6104", # consider-using-augmented-assign / ruff PLR6104
261 | "R6201", # use-set-for-membership / ruff PLR6201
262 | "R6301", # no-self-use / ruff PLR6301
263 | "W0102", # dangerous-default-value / ruff B006
264 | "W0104", # pointless-statement / ruff B018
265 | "W0106", # expression-not-assigned / ruff B018
266 | "W0107", # unnecessary-pass / ruff PIE790
267 | "W0108", # unnecessary-lambda / ruff PLW0108
268 | "W0109", # duplicate-key / ruff F601
269 | "W0120", # useless-else-on-loop / ruff PLW0120
270 | "W0122", # exec-used / ruff S102
271 | "W0123", # eval-used / ruff PGH001
272 | "W0127", # self-assigning-variable / ruff PLW0127
273 | "W0129", # assert-on-string-literal / ruff PLW0129
274 | "W0130", # duplicate-value / ruff B033
275 | "W0131", # named-expr-without-context / ruff PLW0131
276 | "W0133", # pointless-exception-statement / ruff PLW0133
277 | "W0150", # lost-exception / ruff B012
278 | "W0160", # consider-ternary-expression / ruff SIM108
279 | "W0177", # nan-comparison / ruff PLW0117
280 | "W0199", # assert-on-tuple / ruff F631
281 | "W0211", # bad-staticmethod-argument / ruff PLW0211
282 | "W0212", # protected-access / ruff SLF001
283 | "W0245", # super-without-brackets / ruff PLW0245
284 | "W0301", # unnecessary-semicolon / ruff E703
285 | "W0401", # wildcard-import / ruff F403
286 | "W0404", # reimported / ruff F811
287 | "W0406", # import-self / ruff PLW0406
288 | "W0410", # misplaced-future / ruff F404
289 | "W0511", # fixme / ruff PLW0511
290 | "W0602", # global-variable-not-assigned / ruff PLW0602
291 | "W0603", # global-statement / ruff PLW0603
292 | "W0604", # global-at-module-level / ruff PLW0604
293 | "W0611", # unused-import / ruff F401
294 | "W0612", # unused-variable / ruff F841
295 | "W0613", # unused-argument / ruff ARG001
296 | "W0622", # redefined-builtin / ruff A001
297 | "W0640", # cell-var-from-loop / ruff B023
298 | "W0702", # bare-except / ruff E722
299 | "W0705", # duplicate-except / ruff B014
300 | "W0706", # try-except-raise / ruff TRY302
301 | "W0707", # raise-missing-from / ruff TRY200
302 | "W0711", # binary-op-exception / ruff PLW0711
303 | "W0718", # broad-exception-caught / ruff PLW0718
304 | "W0719", # broad-exception-raised / ruff TRY002
305 | "W1113", # keyword-arg-before-vararg / ruff B026
306 | "W1201", # logging-not-lazy / ruff G
307 | "W1202", # logging-format-interpolation / ruff G
308 | "W1203", # logging-fstring-interpolation / ruff G
309 | "W1300", # bad-format-string-key / ruff PLW1300
310 | "W1301", # unused-format-string-key / ruff F504
311 | "W1302", # bad-format-string / ruff PLW1302
312 | "W1303", # missing-format-argument-key / ruff F524
313 | "W1304", # unused-format-string-argument / ruff F507
314 | "W1305", # format-combined-specification / ruff F525
315 | "W1308", # duplicate-string-formatting-argument / ruff PLW1308
316 | "W1309", # f-string-without-interpolation / ruff F541
317 | "W1310", # format-string-without-interpolation / ruff F541
318 | "W1401", # anomalous-backslash-in-string / ruff W605
319 | "W1404", # implicit-str-concat / ruff ISC001
320 | "W1405", # inconsistent-quotes / ruff Q000
321 | "W1406", # redundant-u-string-prefix / ruff UP025
322 | "W1501", # bad-open-mode / ruff PLW1501
323 | "W1508", # invalid-envvar-default / ruff PLW1508
324 | "W1509", # subprocess-popen-preexec-fn / ruff PLW1509
325 | "W1510", # subprocess-run-check / ruff PLW1510
326 | "W1514", # unspecified-encoding / ruff PLW1514
327 | "W1515", # forgotten-debug-statement / ruff T100
328 | "W1518", # method-cache-max-size-none / ruff B019
329 | "W1641", # eq-without-hash / ruff PLW1641
330 | "W2101", # useless-with-lock / ruff PLW2101
331 | "W2402", # non-ascii-file-name / ruff N999
332 | "W2901", # redefined-loop-name / ruff PLW2901
333 | "W3201", # bad-dunder-name / ruff PLW3201
334 | "W3301", # nested-min-max / ruff PLW3301
335 | "duplicate-code",
336 | "fixme",
337 | "too-few-public-methods",
338 | "unsubscriptable-object",
339 | # On purpose disabled as we rely on black
340 | "line-too-long",
341 | "protected-access", # covered by ruff SLF001
342 | # local imports do not work well with pre-commit hook
343 | "import-error",
344 | # already covered by ruff which is faster
345 | "too-many-arguments", # PLR0913
346 | "raise-missing-from",
347 | # Temporarily disable duplicate detection until we remove old code from prerun
348 | "duplicate-code"
349 | ]
350 |
351 | [tool.pytest.ini_options]
352 | addopts = "-p no:pytest_cov --durations=10 --durations-min=1.0 --failed-first --instafail"
353 | # ensure we treat warnings as error
354 | filterwarnings = [
355 | "error",
356 | # py312 ansible-core
357 | # https://github.com/ansible/ansible/issues/81906
358 | "ignore:'importlib.abc.TraversableResources' is deprecated and slated for removal in Python 3.14:DeprecationWarning"
359 | ]
360 | junit_family = "legacy" # see https://docs.codecov.com/docs/test-analytics
361 | testpaths = ["test"]
362 |
363 | [tool.ruff]
364 | extend-include = ["src/ansible_compat/_version.py"]
365 | preview = true
366 | target-version = "py310"
367 |
368 | [tool.ruff.format]
369 | docstring-code-format = true
370 |
371 | [tool.ruff.lint]
372 | external = [
373 | "DOC" # pydoclint
374 | ]
375 | ignore = [
376 | # Disabled on purpose:
377 | "CPY001",
378 | "D203", # incompatible with D211
379 | "D211",
380 | "D213", # incompatible with D212
381 | "E501", # we use black
382 | "RET504", # Unnecessary variable assignment before `return` statement
383 | # Temporary disabled during adoption:
384 | "S607", # Starting a process with a partial executable path
385 | "PLR0912", # Bug https://github.com/charliermarsh/ruff/issues/4244
386 | "PLR0913", # Bug https://github.com/charliermarsh/ruff/issues/4244
387 | "RUF012",
388 | "PERF203",
389 | "DOC201", # preview
390 | "PLC0415",
391 | "DOC501" # preview
392 | ]
393 | select = ["ALL"]
394 |
395 | [tool.ruff.lint.flake8-pytest-style]
396 | parametrize-values-type = "tuple"
397 |
398 | [tool.ruff.lint.isort]
399 | known-first-party = ["ansible_compat"]
400 | known-third-party = ["packaging"]
401 |
402 | [tool.ruff.lint.per-file-ignores]
403 | "test/**/*.py" = [
404 | "DOC402",
405 | "DOC501",
406 | "FBT001",
407 | "PLC2701",
408 | "PLR0917",
409 | "S101",
410 | "S404",
411 | "S603", # subprocess
412 | "S607", # subprocess
413 | "SLF001"
414 | ]
415 | "tools/*.py" = ["S603"]
416 |
417 | [tool.ruff.lint.pydocstyle]
418 | convention = "google"
419 |
420 | [tool.setuptools.dynamic]
421 | dependencies = {file = [".config/requirements.in"]}
422 | optional-dependencies.docs = {file = [".config/requirements-docs.in"]}
423 | optional-dependencies.test = {file = [".config/requirements-test.in"]}
424 |
425 | [tool.setuptools_scm]
426 | # To prevent accidental pick of mobile version tags such as 'v6'
427 | git_describe_command = [
428 | "git",
429 | "describe",
430 | "--dirty",
431 | "--tags",
432 | "--long",
433 | "--match",
434 | "v*.*"
435 | ]
436 | local_scheme = "no-local-version"
437 | tag_regex = "^(?P<prefix>v)?(?P<version>\\d+[^\\+]*)(?P<suffix>.*)?$"
438 | write_to = "src/ansible_compat/_version.py"
439 |
440 | [tool.tomlsort]
441 | in_place = true
442 | sort_inline_tables = true
443 | sort_table_keys = true
444 |
445 | [tool.uv.pip]
446 | annotation-style = "line"
447 | custom-compile-command = "tox run deps"
448 | no-emit-package = ["ansible-core", "pip", "resolvelib", "typing_extensions", "uv"]
449 |
--------------------------------------------------------------------------------
/src/ansible_compat/__init__.py:
--------------------------------------------------------------------------------
1 | """ansible_compat package."""
2 | 
3 | from importlib.metadata import PackageNotFoundError, version
4 | 
5 | try:
6 |     __version__ = version("ansible-compat")
7 | except PackageNotFoundError: # pragma: no cover
8 |     __version__ = "0.1.dev1"  # fallback used when package metadata is absent (e.g. running from a source tree)
9 | 
10 | __all__ = ["__version__"]
--------------------------------------------------------------------------------
/src/ansible_compat/config.py:
--------------------------------------------------------------------------------
1 | """Store configuration options as a singleton."""
2 |
3 | from __future__ import annotations
4 |
5 | import ast
6 | import copy
7 | import os
8 | import re
9 | import subprocess # noqa: S404
10 | from collections import UserDict
11 | from typing import TYPE_CHECKING, Literal
12 |
13 | from packaging.version import Version
14 |
15 | from ansible_compat.constants import ANSIBLE_MIN_VERSION
16 | from ansible_compat.errors import InvalidPrerequisiteError, MissingAnsibleError
17 | from ansible_compat.ports import cache
18 |
19 | if TYPE_CHECKING: # pragma: no cover
20 | from pathlib import Path
21 |
22 |
23 | def parse_ansible_version(stdout: str) -> Version:
24 | """Parse output of 'ansible --version'."""
25 | # Ansible can produce extra output before displaying version in debug mode.
26 |
27 | # ansible-core 2.11+: 'ansible [core 2.11.3]'
28 | match = re.search(
29 | r"^ansible \[(?:core|base) (?P[^\]]+)\]",
30 | stdout,
31 | re.MULTILINE,
32 | )
33 | if match:
34 | return Version(match.group("version"))
35 | msg = f"Unable to parse ansible cli version: {stdout}\nKeep in mind that only {ANSIBLE_MIN_VERSION } or newer are supported."
36 | raise InvalidPrerequisiteError(msg)
37 |
38 |
@cache
def ansible_version(version: str = "") -> Version:
    """Return a Version object for Ansible.

    When ``version`` is provided it is simply converted into a Version
    object so it can be used in comparisons. Otherwise the version of the
    installed ``ansible`` command line tool is detected and returned.
    """
    if not version:
        proc = subprocess.run(
            ["ansible", "--version"],
            text=True,
            check=False,
            capture_output=True,
        )
        if proc.returncode:
            raise MissingAnsibleError(proc=proc)
        return parse_ansible_version(proc.stdout)
    return Version(version)
60 |
61 |
class AnsibleConfig(
    UserDict[str, object],
):  # pylint: disable=too-many-ancestors # noqa: DOC605
    """Interface to query Ansible configuration.

    This should allow user to access everything provided by `ansible-config dump` without having to parse the data themselves.

    Attributes:
        _aliases:
        action_warnings:
        agnostic_become_prompt:
        allow_world_readable_tmpfiles:
        ansible_connection_path:
        ansible_cow_acceptlist:
        ansible_cow_path:
        ansible_cow_selection:
        ansible_force_color:
        ansible_nocolor:
        ansible_nocows:
        ansible_pipelining:
        any_errors_fatal:
        become_allow_same_user:
        become_plugin_path:
        cache_plugin:
        cache_plugin_connection:
        cache_plugin_prefix:
        cache_plugin_timeout:
        callable_accept_list:
        callbacks_enabled:
        collections_on_ansible_version_mismatch:
        collections_paths:
        collections_scan_sys_path:
        color_changed:
        color_console_prompt:
        color_debug:
        color_deprecate:
        color_diff_add:
        color_diff_lines:
        color_diff_remove:
        color_error:
        color_highlight:
        color_ok:
        color_skip:
        color_unreachable:
        color_verbose:
        color_warn:
        command_warnings:
        conditional_bare_vars:
        connection_facts_modules:
        controller_python_warning:
        coverage_remote_output:
        coverage_remote_paths:
        default_action_plugin_path:
        default_allow_unsafe_lookups:
        default_ask_pass:
        default_ask_vault_pass:
        default_become:
        default_become_ask_pass:
        default_become_exe:
        default_become_flags:
        default_become_method:
        default_become_user:
        default_cache_plugin_path:
        default_callback_plugin_path:
        default_cliconf_plugin_path:
        default_collections_path:
        default_connection_plugin_path:
        default_debug:
        default_executable:
        default_fact_path:
        default_filter_plugin_path:
        default_force_handlers:
        default_forks:
        default_gather_subset:
        default_gather_timeout:
        default_gathering:
        default_handler_includes_static:
        default_hash_behaviour:
        default_host_list:
        default_httpapi_plugin_path:
        default_internal_poll_interval:
        default_inventory_plugin_path:
        default_jinja2_extensions:
        default_jinja2_native:
        default_keep_remote_files:
        default_libvirt_lxc_noseclabel:
        default_load_callback_plugins:
        default_local_tmp:
        default_log_filter:
        default_log_path:
        default_lookup_plugin_path:
        default_managed_str:
        default_module_args:
        default_module_compression:
        default_module_name:
        default_module_path:
        default_module_utils_path:
        default_netconf_plugin_path:
        default_no_log:
        default_no_target_syslog:
        default_null_representation:
        default_poll_interval:
        default_private_key_file:
        default_private_role_vars:
        default_remote_port:
        default_remote_user:
        default_roles_path:
        default_selinux_special_fs:
        default_stdout_callback:
        default_strategy:
        default_strategy_plugin_path:
        default_su:
        default_syslog_facility:
        default_task_includes_static:
        default_terminal_plugin_path:
        default_test_plugin_path:
        default_timeout:
        default_transport:
        default_undefined_var_behavior:
        default_vars_plugin_path:
        default_vault_encrypt_identity:
        default_vault_id_match:
        default_vault_identity:
        default_vault_identity_list:
        default_vault_password_file:
        default_verbosity:
        deprecation_warnings:
        devel_warning:
        diff_always:
        diff_context:
        display_args_to_stdout:
        display_skipped_hosts:
        doc_fragment_plugin_path:
        docsite_root_url:
        duplicate_yaml_dict_key:
        enable_task_debugger:
        error_on_missing_handler:
        facts_modules:
        galaxy_cache_dir:
        galaxy_display_progress:
        galaxy_ignore_certs:
        galaxy_role_skeleton:
        galaxy_role_skeleton_ignore:
        galaxy_server:
        galaxy_server_list:
        galaxy_token_path:
        host_key_checking:
        host_pattern_mismatch:
        inject_facts_as_vars:
        interpreter_python:
        interpreter_python_distro_map:
        interpreter_python_fallback:
        invalid_task_attribute_failed:
        inventory_any_unparsed_is_failed:
        inventory_cache_enabled:
        inventory_cache_plugin:
        inventory_cache_plugin_connection:
        inventory_cache_plugin_prefix:
        inventory_cache_timeout:
        inventory_enabled:
        inventory_export:
        inventory_ignore_exts:
        inventory_ignore_patterns:
        inventory_unparsed_is_failed:
        localhost_warning:
        max_file_size_for_diff:
        module_ignore_exts:
        netconf_ssh_config:
        network_group_modules:
        old_plugin_cache_clearing:
        paramiko_host_key_auto_add:
        paramiko_look_for_keys:
        persistent_command_timeout:
        persistent_connect_retry_timeout:
        persistent_connect_timeout:
        persistent_control_path_dir:
        playbook_dir:
        playbook_vars_root:
        plugin_filters_cfg:
        python_module_rlimit_nofile:
        retry_files_enabled:
        retry_files_save_path:
        run_vars_plugins:
        show_custom_stats:
        string_conversion_action:
        string_type_filters:
        system_warnings:
        tags_run:
        tags_skip:
        task_debugger_ignore_errors:
        task_timeout:
        transform_invalid_group_chars:
        use_persistent_connections:
        variable_plugins_enabled:
        variable_precedence:
        verbose_to_stderr:
        win_async_startup_timeout:
        worker_shutdown_poll_count:
        worker_shutdown_poll_delay:
        yaml_filename_extensions:
    """

    # Maps renamed config keys to their current names so old spellings keep
    # resolving through attribute access (see __getattribute__).
    _aliases = {
        "COLLECTIONS_PATH": "COLLECTIONS_PATHS",  # 2.9 -> 2.10
    }
    # Expose some attributes to enable auto-complete in editors, based on
    # https://docs.ansible.com/ansible/latest/reference_appendices/config.html
    action_warnings: bool = True
    agnostic_become_prompt: bool = True
    allow_world_readable_tmpfiles: bool = False
    ansible_connection_path: str | None = None
    ansible_cow_acceptlist: list[str]
    ansible_cow_path: str | None = None
    ansible_cow_selection: str = "default"
    ansible_force_color: bool = False
    ansible_nocolor: bool = False
    ansible_nocows: bool = False
    ansible_pipelining: bool = False
    any_errors_fatal: bool = False
    become_allow_same_user: bool = False
    become_plugin_path: list[str] = [
        "~/.ansible/plugins/become",
        "/usr/share/ansible/plugins/become",
    ]
    cache_plugin: str = "memory"
    cache_plugin_connection: str | None = None
    cache_plugin_prefix: str = "ansible_facts"
    cache_plugin_timeout: int = 86400
    callable_accept_list: list[str] = []
    callbacks_enabled: list[str] = []
    collections_on_ansible_version_mismatch: Literal["warning", "ignore"] = "warning"
    collections_paths: list[str] = [
        "~/.ansible/collections",
        "/usr/share/ansible/collections",
    ]
    collections_scan_sys_path: bool = True
    color_changed: str = "yellow"
    color_console_prompt: str = "white"
    color_debug: str = "dark gray"
    color_deprecate: str = "purple"
    color_diff_add: str = "green"
    color_diff_lines: str = "cyan"
    color_diff_remove: str = "red"
    color_error: str = "red"
    color_highlight: str = "white"
    color_ok: str = "green"
    color_skip: str = "cyan"
    color_unreachable: str = "bright red"
    color_verbose: str = "blue"
    color_warn: str = "bright purple"
    command_warnings: bool = False
    conditional_bare_vars: bool = False
    connection_facts_modules: dict[str, str]
    controller_python_warning: bool = True
    coverage_remote_output: str | None
    coverage_remote_paths: list[str]
    default_action_plugin_path: list[str] = [
        "~/.ansible/plugins/action",
        "/usr/share/ansible/plugins/action",
    ]
    default_allow_unsafe_lookups: bool = False
    default_ask_pass: bool = False
    default_ask_vault_pass: bool = False
    default_become: bool = False
    default_become_ask_pass: bool = False
    default_become_exe: str | None = None
    default_become_flags: str
    default_become_method: str = "sudo"
    default_become_user: str = "root"
    default_cache_plugin_path: list[str] = [
        "~/.ansible/plugins/cache",
        "/usr/share/ansible/plugins/cache",
    ]
    default_callback_plugin_path: list[str] = [
        "~/.ansible/plugins/callback",
        "/usr/share/ansible/plugins/callback",
    ]
    default_cliconf_plugin_path: list[str] = [
        "~/.ansible/plugins/cliconf",
        "/usr/share/ansible/plugins/cliconf",
    ]
    default_connection_plugin_path: list[str] = [
        "~/.ansible/plugins/connection",
        "/usr/share/ansible/plugins/connection",
    ]
    default_debug: bool = False
    default_executable: str = "/bin/sh"
    default_fact_path: str | None = None
    default_filter_plugin_path: list[str] = [
        "~/.ansible/plugins/filter",
        "/usr/share/ansible/plugins/filter",
    ]
    default_force_handlers: bool = False
    default_forks: int = 5
    default_gather_subset: list[str] = ["all"]
    default_gather_timeout: int = 10
    default_gathering: Literal["smart", "explicit", "implicit"] = "smart"
    default_handler_includes_static: bool = False
    default_hash_behaviour: str = "replace"
    default_host_list: list[str] = ["/etc/ansible/hosts"]
    default_httpapi_plugin_path: list[str] = [
        "~/.ansible/plugins/httpapi",
        "/usr/share/ansible/plugins/httpapi",
    ]
    default_internal_poll_interval: float = 0.001
    default_inventory_plugin_path: list[str] = [
        "~/.ansible/plugins/inventory",
        "/usr/share/ansible/plugins/inventory",
    ]
    default_jinja2_extensions: list[str] = []
    default_jinja2_native: bool = False
    default_keep_remote_files: bool = False
    default_libvirt_lxc_noseclabel: bool = False
    default_load_callback_plugins: bool = False
    default_local_tmp: str = "~/.ansible/tmp"
    default_log_filter: list[str] = []
    default_log_path: str | None = None
    default_lookup_plugin_path: list[str] = [
        "~/.ansible/plugins/lookup",
        "/usr/share/ansible/plugins/lookup",
    ]
    default_managed_str: str = "Ansible managed"
    default_module_args: str
    default_module_compression: str = "ZIP_DEFLATED"
    default_module_name: str = "command"
    default_module_path: list[str] = [
        "~/.ansible/plugins/modules",
        "/usr/share/ansible/plugins/modules",
    ]
    default_module_utils_path: list[str] = [
        "~/.ansible/plugins/module_utils",
        "/usr/share/ansible/plugins/module_utils",
    ]
    default_netconf_plugin_path: list[str] = [
        "~/.ansible/plugins/netconf",
        "/usr/share/ansible/plugins/netconf",
    ]
    default_no_log: bool = False
    default_no_target_syslog: bool = False
    default_null_representation: str | None = None
    default_poll_interval: int = 15
    default_private_key_file: str | None = None
    default_private_role_vars: bool = False
    default_remote_port: str | None = None
    default_remote_user: str | None = None
    # https://docs.ansible.com/ansible/latest/reference_appendices/config.html#collections-paths
    default_collections_path: list[str] = [
        "~/.ansible/collections",
        "/usr/share/ansible/collections",
    ]
    default_roles_path: list[str] = [
        "~/.ansible/roles",
        "/usr/share/ansible/roles",
        "/etc/ansible/roles",
    ]
    default_selinux_special_fs: list[str] = [
        "fuse",
        "nfs",
        "vboxsf",
        "ramfs",
        "9p",
        "vfat",
    ]
    default_stdout_callback: str = "default"
    default_strategy: str = "linear"
    default_strategy_plugin_path: list[str] = [
        "~/.ansible/plugins/strategy",
        "/usr/share/ansible/plugins/strategy",
    ]
    default_su: bool = False
    default_syslog_facility: str = "LOG_USER"
    default_task_includes_static: bool = False
    default_terminal_plugin_path: list[str] = [
        "~/.ansible/plugins/terminal",
        "/usr/share/ansible/plugins/terminal",
    ]
    default_test_plugin_path: list[str] = [
        "~/.ansible/plugins/test",
        "/usr/share/ansible/plugins/test",
    ]
    default_timeout: int = 10
    default_transport: str = "smart"
    default_undefined_var_behavior: bool = True
    default_vars_plugin_path: list[str] = [
        "~/.ansible/plugins/vars",
        "/usr/share/ansible/plugins/vars",
    ]
    default_vault_encrypt_identity: str | None = None
    default_vault_id_match: bool = False
    default_vault_identity: str = "default"
    default_vault_identity_list: list[str] = []
    default_vault_password_file: str | None = None
    default_verbosity: int = 0
    deprecation_warnings: bool = False
    devel_warning: bool = True
    diff_always: bool = False
    diff_context: int = 3
    display_args_to_stdout: bool = False
    display_skipped_hosts: bool = True
    docsite_root_url: str = "https://docs.ansible.com/ansible/"
    doc_fragment_plugin_path: list[str] = [
        "~/.ansible/plugins/doc_fragments",
        "/usr/share/ansible/plugins/doc_fragments",
    ]
    duplicate_yaml_dict_key: Literal["warn", "error", "ignore"] = "warn"
    enable_task_debugger: bool = False
    error_on_missing_handler: bool = True
    facts_modules: list[str] = ["smart"]
    galaxy_cache_dir: str = "~/.ansible/galaxy_cache"
    galaxy_display_progress: str | None = None
    galaxy_ignore_certs: bool = False
    galaxy_role_skeleton: str | None = None
    galaxy_role_skeleton_ignore: list[str] = ["^.git$", "^.*/.git_keep$"]
    galaxy_server: str = "https://galaxy.ansible.com"
    galaxy_server_list: str | None = None
    galaxy_token_path: str = "~/.ansible/galaxy_token"  # noqa: S105
    host_key_checking: bool = True
    host_pattern_mismatch: Literal["warning", "error", "ignore"] = "warning"
    inject_facts_as_vars: bool = True
    interpreter_python: str = "auto_legacy"
    interpreter_python_distro_map: dict[str, str]
    interpreter_python_fallback: list[str]
    invalid_task_attribute_failed: bool = True
    inventory_any_unparsed_is_failed: bool = False
    inventory_cache_enabled: bool = False
    inventory_cache_plugin: str | None = None
    inventory_cache_plugin_connection: str | None = None
    inventory_cache_plugin_prefix: str = "ansible_facts"
    inventory_cache_timeout: int = 3600
    inventory_enabled: list[str] = [
        "host_list",
        "script",
        "auto",
        "yaml",
        "ini",
        "toml",
    ]
    inventory_export: bool = False
    inventory_ignore_exts: str
    inventory_ignore_patterns: list[str] = []
    inventory_unparsed_is_failed: bool = False
    localhost_warning: bool = True
    max_file_size_for_diff: int = 104448
    module_ignore_exts: str
    netconf_ssh_config: str | None = None
    network_group_modules: list[str] = [
        "eos",
        "nxos",
        "ios",
        "iosxr",
        "junos",
        "enos",
        "ce",
        "vyos",
        "sros",
        "dellos9",
        "dellos10",
        "dellos6",
        "asa",
        "aruba",
        "aireos",
        "bigip",
        "ironware",
        "onyx",
        "netconf",
        "exos",
        "voss",
        "slxos",
    ]
    old_plugin_cache_clearing: bool = False
    paramiko_host_key_auto_add: bool = False
    paramiko_look_for_keys: bool = True
    persistent_command_timeout: int = 30
    persistent_connect_retry_timeout: int = 15
    persistent_connect_timeout: int = 30
    persistent_control_path_dir: str = "~/.ansible/pc"
    playbook_dir: str | None
    playbook_vars_root: Literal["top", "bottom", "all"] = "top"
    plugin_filters_cfg: str | None = None
    python_module_rlimit_nofile: int = 0
    retry_files_enabled: bool = False
    retry_files_save_path: str | None = None
    run_vars_plugins: str = "demand"
    show_custom_stats: bool = False
    string_conversion_action: Literal["warn", "error", "ignore"] = "warn"
    string_type_filters: list[str] = [
        "string",
        "to_json",
        "to_nice_json",
        "to_yaml",
        "to_nice_yaml",
        "ppretty",
        "json",
    ]
    system_warnings: bool = True
    tags_run: list[str] = []
    tags_skip: list[str] = []
    task_debugger_ignore_errors: bool = True
    task_timeout: int = 0
    transform_invalid_group_chars: Literal[
        "always",
        "never",
        "ignore",
        "silently",
    ] = "never"
    use_persistent_connections: bool = False
    variable_plugins_enabled: list[str] = ["host_group_vars"]
    variable_precedence: list[str] = [
        "all_inventory",
        "groups_inventory",
        "all_plugins_inventory",
        "all_plugins_play",
        "groups_plugins_inventory",
        "groups_plugins_play",
    ]
    verbose_to_stderr: bool = False
    win_async_startup_timeout: int = 5
    worker_shutdown_poll_count: int = 0
    worker_shutdown_poll_delay: float = 0.1
    yaml_filename_extensions: list[str] = [".yml", ".yaml", ".json"]

    def __init__(
        self,
        config_dump: str | None = None,
        data: dict[str, object] | None = None,
        cache_dir: Path | None = None,
    ) -> None:
        """Load config dictionary.

        Args:
            config_dump: Raw output of `ansible-config dump`; when absent the
                command is executed to obtain it.
            data: Pre-parsed configuration mapping; takes precedence over
                config_dump when provided.
            cache_dir: Optional cache directory associated with this config.
        """
        super().__init__()

        self.cache_dir = cache_dir
        if data:
            # Keep a private copy so later mutations by the caller do not
            # leak into this config object.
            self.data = copy.deepcopy(data)
            return

        if not config_dump:
            env = os.environ.copy()
            # Avoid possible ANSI garbage
            env["ANSIBLE_FORCE_COLOR"] = "0"
            config_dump = subprocess.check_output(
                ["ansible-config", "dump"],
                text=True,
                env=env,
            )

        # Each dump line looks like 'SOME_KEY(origin) = value'. Fixed: the
        # named groups '<key>' and '<value>' were missing from the pattern,
        # which made re.finditer() raise re.error.
        for match in re.finditer(
            r"^(?P<key>[A-Za-z0-9_]+).* = (?P<value>.*)$",
            config_dump,
            re.MULTILINE,
        ):
            key = match.groupdict()["key"]
            value = match.groupdict()["value"]
            try:
                # Values are Python literals (ints, bools, lists, ...);
                # anything that fails to parse is kept as a raw string.
                self[key] = ast.literal_eval(value)
            except (NameError, SyntaxError, ValueError):
                self[key] = value

    def __getattribute__(self, attr_name: str) -> object:
        """Allow access of config options as attributes."""
        parent_dict = super().__dict__  # pylint: disable=no-member
        if attr_name in parent_dict:
            return parent_dict[attr_name]

        data = super().__getattribute__("data")
        if attr_name == "data":  # pragma: no cover
            return data

        # Config keys are stored uppercase; also honor renamed keys.
        name = attr_name.upper()
        if name in data:
            return data[name]
        if name in AnsibleConfig._aliases:
            return data[AnsibleConfig._aliases[name]]

        return super().__getattribute__(attr_name)

    def __getitem__(self, name: str) -> object:
        """Allow access to config options using indexing."""
        return super().__getitem__(name.upper())

    def __copy__(self) -> AnsibleConfig:
        """Allow users to run copy on Config."""
        return AnsibleConfig(data=self.data)

    def __deepcopy__(self, memo: object) -> AnsibleConfig:
        """Allow users to run deepcopy on Config."""
        return AnsibleConfig(data=self.data)
649 |
650 |
651 | __all__ = [
652 | "AnsibleConfig",
653 | "ansible_version",
654 | "parse_ansible_version",
655 | ]
656 |
--------------------------------------------------------------------------------
/src/ansible_compat/constants.py:
--------------------------------------------------------------------------------
1 | """Constants used by ansible_compat."""
2 |
3 | from pathlib import Path
4 |
5 | META_MAIN = (Path("meta") / Path("main.yml"), Path("meta") / Path("main.yaml"))
6 | REQUIREMENT_LOCATIONS = [
7 | "requirements.yml",
8 | "roles/requirements.yml",
9 | "collections/requirements.yml",
10 | # These is more of less the official way to store test requirements in collections so far, comments shows number of repos using this reported by https://sourcegraph.com/ at the time of writing
11 | "tests/requirements.yml", # 170
12 | "tests/integration/requirements.yml", # 3
13 | "tests/unit/requirements.yml", # 1
14 | ]
15 |
16 | # Minimal version of Ansible we support for runtime
17 | ANSIBLE_MIN_VERSION = "2.16"
18 |
19 | # Based on https://docs.ansible.com/ansible/latest/reference_appendices/config.html
20 | ANSIBLE_DEFAULT_ROLES_PATH = (
21 | "~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles"
22 | )
23 |
24 | INVALID_CONFIG_RC = 2
25 | ANSIBLE_MISSING_RC = 4
26 | INVALID_PREREQUISITES_RC = 10
27 |
28 | MSG_INVALID_FQRL = """\
29 | Computed fully qualified role name of {0} does not follow current galaxy requirements.
30 | Please edit meta/main.yml and assure we can correctly determine full role name:
31 |
32 | galaxy_info:
33 | role_name: my_name # if absent directory name hosting role is used instead
34 | namespace: my_galaxy_namespace # if absent, author is used instead
35 |
36 | Namespace: https://old-galaxy.ansible.com/docs/contributing/namespaces.html#galaxy-namespace-limitations
37 | Role: https://old-galaxy.ansible.com/docs/contributing/creating_role.html#role-names
38 |
39 | As an alternative, you can add 'role-name' to either skip_list or warn_list.
40 | """
41 |
42 | RC_ANSIBLE_OPTIONS_ERROR = 5
43 |
--------------------------------------------------------------------------------
/src/ansible_compat/errors.py:
--------------------------------------------------------------------------------
1 | """Module to deal with errors."""
2 |
3 | from __future__ import annotations
4 |
5 | from typing import TYPE_CHECKING, Any
6 |
7 | from ansible_compat.constants import ANSIBLE_MISSING_RC, INVALID_PREREQUISITES_RC
8 |
9 | if TYPE_CHECKING: # pragma: no cover
10 | from subprocess import CompletedProcess # noqa: S404
11 |
12 |
class AnsibleCompatError(RuntimeError):
    """Generic error originating from ansible_compat library."""

    # Process exit code associated with this failure class.
    code = 1  # generic error

    def __init__(
        self,
        message: str | None = None,
        proc: CompletedProcess[Any] | None = None,
    ) -> None:
        """Create the exception, optionally recording the failed process."""
        self.proc = proc
        super().__init__(message)
26 |
27 |
class AnsibleCommandError(RuntimeError):
    """Exception raised when running an Ansible command fails."""

    def __init__(self, proc: CompletedProcess[Any]) -> None:
        """Build the error message from the given completed process."""
        self.proc = proc
        command = " ".join(proc.args)
        super().__init__(
            f"Got {proc.returncode} exit code while running: {command}",
        )
38 |
39 |
class MissingAnsibleError(AnsibleCompatError):
    """Reports a missing or broken Ansible installation."""

    # Exit code used when the ansible executable cannot be found or run.
    code = ANSIBLE_MISSING_RC

    def __init__(
        self,
        message: str | None = "Unable to find a working copy of ansible executable.",
        proc: CompletedProcess[Any] | None = None,
    ) -> None:
        """Construct the exception.

        Args:
            message: Human readable explanation of the failure.
            proc: Completed process that exposed the problem, if any.
        """
        # Delegate proc storage to the parent instead of duplicating it here.
        super().__init__(message, proc=proc)
53 |
54 |
class InvalidPrerequisiteError(AnsibleCompatError):
    """Reports a missing requirement."""

    # Exit code used when a prerequisite (e.g. collection, role or version)
    # is missing or invalid.
    code = INVALID_PREREQUISITES_RC
--------------------------------------------------------------------------------
/src/ansible_compat/loaders.py:
--------------------------------------------------------------------------------
1 | """Utilities for loading various files."""
2 |
3 | from __future__ import annotations
4 |
5 | from typing import TYPE_CHECKING, Any
6 |
7 | import yaml
8 |
9 | from ansible_compat.errors import InvalidPrerequisiteError
10 |
11 | if TYPE_CHECKING: # pragma: no cover
12 | from pathlib import Path
13 |
14 |
def yaml_from_file(path: Path) -> Any:  # noqa: ANN401
    """Load a YAML file and return its parsed content."""
    with path.open(encoding="utf-8") as stream:
        # safe_load is equivalent to load(..., Loader=SafeLoader).
        return yaml.safe_load(stream)
19 |
20 |
def colpath_from_path(path: Path) -> str | None:
    """Return a FQCN ("namespace/name") derived from a collection directory.

    Returns None when the directory does not contain a galaxy.yml file.
    """
    galaxy_file = path / "galaxy.yml"
    if not galaxy_file.exists():
        return None

    galaxy = yaml_from_file(galaxy_file)
    for field_name in ("namespace", "name"):
        if field_name not in galaxy:
            msg = f"{galaxy_file} is missing the following mandatory field {field_name}"
            raise InvalidPrerequisiteError(msg)
    return f"{galaxy['namespace']}/{galaxy['name']}"
32 |
--------------------------------------------------------------------------------
/src/ansible_compat/ports.py:
--------------------------------------------------------------------------------
1 | """Portability helpers."""
2 |
3 | from functools import cache, cached_property
4 |
5 | __all__ = ["cache", "cached_property"]
6 |
--------------------------------------------------------------------------------
/src/ansible_compat/prerun.py:
--------------------------------------------------------------------------------
1 | """Utilities for configuring ansible runtime environment."""
2 |
3 | import hashlib
4 | import os
5 | import tempfile
6 | import warnings
7 | from pathlib import Path
8 |
9 |
def is_writable(path: Path) -> bool:
    """Check if path is writable, creating it first when missing.

    Args:
        path: Directory to check.

    Returns:
        True if path is writable, False otherwise.
    """
    try:
        path.mkdir(parents=True, exist_ok=True)
    except OSError:
        return False
    if not path.exists():
        return False
    return os.access(path, os.W_OK)
24 |
25 |
def get_cache_dir(project_dir: Path, *, isolated: bool = True) -> Path:
    """Compute cache directory to be used based on project path.

    Candidates, later ones taking precedence:
    1. ``$VIRTUAL_ENV/.ansible`` when a writable virtualenv is active.
    2. ``<project_dir>/.ansible`` when ``isolated`` is true, or
       ``$ANSIBLE_HOME``/``~/.ansible`` when it is false.
    3. A per-project temporary directory as a last-resort fallback.

    Args:
        project_dir: Path to the project directory.
        isolated: Whether to use isolated cache directory.

    Returns:
        A writable cache directory.

    Raises:
        RuntimeError: if cache directory is not writable.
        OSError: if cache directory cannot be created.
    """
    cache_dir: Path | None = None
    if "VIRTUAL_ENV" in os.environ:
        path = Path(os.environ["VIRTUAL_ENV"]).resolve() / ".ansible"
        if is_writable(path):
            cache_dir = path
        else:
            msg = f"Found VIRTUAL_ENV={os.environ['VIRTUAL_ENV']} but we cannot use it for caching as it is not writable."
            # Fixed: dropped the bogus source={"msg": msg} argument; 'source'
            # is meant for ResourceWarning origin tracking, not payload data,
            # and the other warn() calls below do not pass it either.
            warnings.warn(
                message=msg,
                stacklevel=2,
            )

    if isolated:
        # Isolated mode prefers the project directory even over a usable
        # virtualenv location.
        project_dir = project_dir.resolve() / ".ansible"
        if is_writable(project_dir):
            cache_dir = project_dir
        else:
            msg = f"Project directory {project_dir} cannot be used for caching as it is not writable."
            warnings.warn(msg, stacklevel=2)
    else:
        cache_dir = Path(os.environ.get("ANSIBLE_HOME", "~/.ansible")).expanduser()
        # This code should be never be reached because import from ansible-core
        # would trigger a fatal error if this location is not writable.
        if not is_writable(cache_dir):  # pragma: no cover
            msg = f"Cache directory {cache_dir} is not writable."
            raise OSError(msg)

    if not cache_dir:
        # As "project_dir" can also be "/" and user might not be able
        # to write to it, we use a temporary directory as fallback.
        checksum = hashlib.sha256(
            project_dir.as_posix().encode("utf-8"),
        ).hexdigest()[:4]

        cache_dir = Path(tempfile.gettempdir()) / f".ansible-{checksum}"
        cache_dir.mkdir(parents=True, exist_ok=True)
        msg = f"Using unique temporary directory {cache_dir} for caching."
        warnings.warn(msg, stacklevel=2)

    # Ensure basic folder structure exists so `ansible-galaxy list` does not
    # fail with: None of the provided paths were usable. Please specify a valid path with
    try:
        for name in ("roles", "collections"):
            (cache_dir / name).mkdir(parents=True, exist_ok=True)
    except OSError as exc:  # pragma: no cover
        msg = "Failed to create cache directory."
        raise RuntimeError(msg) from exc

    # We succeed only if the path is writable.
    return cache_dir
91 |
--------------------------------------------------------------------------------
/src/ansible_compat/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ansible/ansible-compat/af6d5a8fed201502be3a4574072a4ebba857e015/src/ansible_compat/py.typed
--------------------------------------------------------------------------------
/src/ansible_compat/runtime.py:
--------------------------------------------------------------------------------
1 | """Ansible runtime environment manager."""
2 |
3 | # pylint: disable=too-many-lines
4 |
5 | from __future__ import annotations
6 |
7 | import contextlib
8 | import importlib
9 | import json
10 | import logging
11 | import os
12 | import re
13 | import shutil
14 | import site
15 | import subprocess # noqa: S404
16 | import sys
17 | import warnings
18 | from collections import OrderedDict
19 | from dataclasses import dataclass, field
20 | from pathlib import Path
21 | from typing import TYPE_CHECKING, Any, no_type_check
22 |
23 | import subprocess_tee
24 | from packaging.version import Version
25 |
26 | from ansible_compat.config import (
27 | AnsibleConfig,
28 | parse_ansible_version,
29 | )
30 | from ansible_compat.constants import (
31 | META_MAIN,
32 | MSG_INVALID_FQRL,
33 | RC_ANSIBLE_OPTIONS_ERROR,
34 | REQUIREMENT_LOCATIONS,
35 | )
36 | from ansible_compat.errors import (
37 | AnsibleCommandError,
38 | AnsibleCompatError,
39 | InvalidPrerequisiteError,
40 | MissingAnsibleError,
41 | )
42 | from ansible_compat.loaders import colpath_from_path, yaml_from_file
43 | from ansible_compat.prerun import get_cache_dir
44 |
45 | if TYPE_CHECKING: # pragma: no cover
46 | # https://github.com/PyCQA/pylint/issues/3240
47 | # pylint: disable=unsubscriptable-object
48 | CompletedProcess = subprocess.CompletedProcess[Any]
49 | from collections.abc import Callable
50 | else:
51 | CompletedProcess = subprocess.CompletedProcess
52 |
53 |
# Module level logger for this package.
_logger = logging.getLogger(__name__)
# regex to extract the first version from a collection range specifier
version_re = re.compile(r":[>=<]*([^,]*)")
# Valid namespace: lowercase letter followed by at least one more
# lowercase letter, digit or underscore (minimum two characters total).
namespace_re = re.compile(r"^[a-z][a-z0-9_]+$")
58 |
59 |
class AnsibleWarning(Warning):
    """Warning category for messages related to the Ansible runtime."""
63 |
@dataclass
class Collection:
    """Container for Ansible collection information."""

    name: str  # collection name
    version: str  # collection version string
    path: Path  # filesystem location where the collection was found
71 |
72 |
class CollectionVersion(Version):
    """Version variant that also accepts the ``*`` wildcard."""

    def __init__(self, version: str) -> None:
        """Initialize collection version, mapping ``*`` to ``0``."""
        # The packaging Version class cannot parse a wildcard, so treat it
        # as "0", the smallest possible version.
        super().__init__("0" if version == "*" else version)
83 |
84 |
@dataclass
class Plugins:  # pylint: disable=too-many-instance-attributes
    """Dataclass to access installed Ansible plugins, uses ansible-doc to retrieve them."""

    # Runtime used to execute ansible-doc commands.
    runtime: Runtime
    # One mapping per plugin kind; none of these are set by __init__
    # (init=False), so reading them triggers the lazy lookup implemented in
    # __getattribute__ below.
    become: dict[str, str] = field(init=False)
    cache: dict[str, str] = field(init=False)
    callback: dict[str, str] = field(init=False)
    cliconf: dict[str, str] = field(init=False)
    connection: dict[str, str] = field(init=False)
    httpapi: dict[str, str] = field(init=False)
    inventory: dict[str, str] = field(init=False)
    lookup: dict[str, str] = field(init=False)
    netconf: dict[str, str] = field(init=False)
    shell: dict[str, str] = field(init=False)
    vars: dict[str, str] = field(init=False)
    module: dict[str, str] = field(init=False)
    strategy: dict[str, str] = field(init=False)
    test: dict[str, str] = field(init=False)
    filter: dict[str, str] = field(init=False)
    role: dict[str, str] = field(init=False)
    keyword: dict[str, str] = field(init=False)

    @no_type_check
    def __getattribute__(self, attr: str):  # noqa: ANN204
        """Get attribute.

        For the plugin-kind attributes listed below, an unset attribute is
        resolved by running ``ansible-doc --json -l -t <kind>`` and returning
        its parsed JSON output; any other attribute uses default lookup.
        """
        if attr in {
            "become",
            "cache",
            "callback",
            "cliconf",
            "connection",
            "httpapi",
            "inventory",
            "lookup",
            "netconf",
            "shell",
            "vars",
            "module",
            "strategy",
            "test",
            "filter",
            "role",
            "keyword",
        }:
            try:
                result = super().__getattribute__(attr)
            except AttributeError as exc:
                # Attribute was never assigned: ask ansible-doc to list the
                # plugins of this kind.
                proc = self.runtime.run(
                    ["ansible-doc", "--json", "-l", "-t", attr],
                )
                data = json.loads(proc.stdout)
                if not isinstance(data, dict):  # pragma: no cover
                    msg = "Unexpected output from ansible-doc"
                    raise AnsibleCompatError(msg) from exc
                # NOTE(review): the result is not stored back on the
                # instance, so every access re-runs ansible-doc — confirm
                # whether caching was intended.
                result = data
        else:
            result = super().__getattribute__(attr)

        return result
145 |
146 |
# pylint: disable=too-many-instance-attributes
class Runtime:
    """Ansible Runtime manager."""

    # Cached result of the `version` property, resolved on first access.
    _version: Version | None = None
    # NOTE(review): class-level mutable default, shared across instances until
    # load_collections() rebinds it on a specific instance.
    collections: OrderedDict[str, Collection] = OrderedDict()
    cache_dir: Path
    # Used to track if we have already initialized the Ansible runtime as attempts
    # to do it multiple times will cause runtime warnings from within ansible-core
    initialized: bool = False
    plugins: Plugins
    # Syntax-check results keyed by (playbook, basedir). Class-level, so the
    # cache is shared by all Runtime instances.
    _has_playbook_cache: dict[tuple[str, Path | None], bool] = {}
    require_module: bool = False
160 |
    def __init__(
        self,
        project_dir: Path | None = None,
        *,
        isolated: bool = False,
        min_required_version: str | None = None,
        require_module: bool = False,
        max_retries: int = 0,
        environ: dict[str, str] | None = None,
        verbosity: int = 0,
    ) -> None:
        """Initialize Ansible runtime environment.

        :param project_dir: The directory containing the Ansible project. If
                            not mentioned it will be guessed from the current
                            working directory.
        :param isolated: Assure that installation of collections or roles
                         does not affect Ansible installation, an unique cache
                         directory being used instead.
        :param min_required_version: Minimal version of Ansible required. If
                                     not found, a :class:`RuntimeError`
                                     exception is raised.
        :param require_module: If set, instantiation will fail if Ansible
                               Python module is missing or is not matching
                               the same version as the Ansible command line.
                               That is useful for consumers that expect to
                               also perform Python imports from Ansible.
        :param max_retries: Number of times it should retry network operations.
                            Default is 0, no retries.
        :param environ: Environment dictionary to use, if undefined
                        ``os.environ`` will be copied and used.
        :param verbosity: Verbosity level to use.
        """
        self.project_dir = project_dir or Path.cwd()
        self.isolated = isolated
        self.max_retries = max_retries
        self.environ = environ or os.environ.copy()
        # Reject the deprecated plural spelling outright to avoid ambiguous
        # collection-path configuration later on.
        if "ANSIBLE_COLLECTIONS_PATHS" in self.environ:
            msg = "ANSIBLE_COLLECTIONS_PATHS was detected, replace it with ANSIBLE_COLLECTIONS_PATH to continue."
            raise RuntimeError(msg)
        self.plugins = Plugins(runtime=self)
        self.verbosity = verbosity

        self.initialize_logger(level=self.verbosity)

        # Reduce noise from paramiko, unless user already defined PYTHONWARNINGS
        # paramiko/transport.py:236: CryptographyDeprecationWarning: Blowfish has been deprecated
        # https://github.com/paramiko/paramiko/issues/2038
        # As CryptographyDeprecationWarning is not a builtin, we cannot use
        # PYTHONWARNINGS to ignore it using category but we can use message.
        # https://stackoverflow.com/q/68251969/99834
        if "PYTHONWARNINGS" not in self.environ:  # pragma: no cover
            self.environ["PYTHONWARNINGS"] = "ignore:Blowfish has been deprecated"

        self.cache_dir = get_cache_dir(self.project_dir, isolated=self.isolated)

        self.config = AnsibleConfig(cache_dir=self.cache_dir)

        # Add the sys.path to the collection paths if not isolated
        self._patch_collection_paths()

        # Validate the detected ansible version before doing any real work.
        if not self.version_in_range(lower=min_required_version):
            msg = f"Found incompatible version of ansible runtime {self.version}, instead of {min_required_version} or newer."
            raise RuntimeError(msg)
        if require_module:
            self.require_module = True
            self._ensure_module_available()

        # pylint: disable=import-outside-toplevel
        from ansible.utils.display import Display

        # pylint: disable=unused-argument
        def warning(  # noqa: DOC103
            self: Display,  # noqa: ARG001
            msg: str,
            formatted: bool = False,  # noqa: ARG001,FBT001,FBT002
            *,
            help_text: str | None = None,  # noqa: ARG001
            obj: Any = None,  # noqa: ARG001,ANN401
        ) -> None:  # pragma: no cover
            """Override ansible.utils.display.Display.warning to avoid printing warnings."""
            warnings.warn(
                message=msg,
                category=AnsibleWarning,
                stacklevel=2,
                source={"msg": msg},
            )

        # Monkey patch ansible warning in order to use warnings module.
        Display.warning = warning
251 |
252 | def initialize_logger(self, level: int = 0) -> None: # noqa: PLR6301
253 | """Set up the global logging level based on the verbosity number."""
254 | verbosity_map = {
255 | -2: logging.CRITICAL,
256 | -1: logging.ERROR,
257 | 0: logging.WARNING,
258 | 1: logging.INFO,
259 | 2: logging.DEBUG,
260 | }
261 | # Unknown logging level is treated as DEBUG
262 | logging_level = verbosity_map.get(level, logging.DEBUG)
263 | _logger.setLevel(logging_level)
264 | # Use module-level _logger instance to validate it
265 | _logger.debug("Logging initialized to level %s", logging_level)
266 |
    def _patch_collection_paths(self) -> None:
        """Modify Ansible collection path for testing purposes.

        - Add the sys.path to the end of collection paths.
        - Add the site-packages to the beginning of collection paths to match
          ansible-core and ade behavior and trick ansible-galaxy to install
          default to the venv site-packages location (isolation).
        """
        # ansible-core normal precedence is: adjacent, local paths, configured paths, site paths
        collections_paths: list[str] = self.config.collections_paths.copy()
        if self.config.collections_scan_sys_path:
            # Only sys.path entries that actually contain an
            # ansible_collections directory are appended.
            for path in sys.path:
                if (
                    path not in collections_paths
                    and (Path(path) / "ansible_collections").is_dir()
                ):
                    collections_paths.append(  # pylint: disable=E1101
                        path,
                    )
        # When inside a venv, we also add the site-packages to the end of the
        # collections path because this is the last place where ansible-core
        # will look for them. This also ensures that when calling ansible-galaxy
        # to install content.
        for path in reversed(site.getsitepackages()):
            if path not in collections_paths:
                collections_paths.append(path)

        if collections_paths != self.config.collections_paths:
            _logger.info(
                "Collection paths was patched to include extra directories %s",
                ",".join(collections_paths),
            )
        else:
            # NOTE(review): this branch also triggers when scanning is enabled
            # but added no new paths, yet the message only mentions the
            # disabled setting — confirm the intended wording.
            msg = "ANSIBLE_COLLECTIONS_SCAN_SYS_PATH is disabled, not patching collection paths. This may lead to unexpected behavior when using dev tools and prevent full isolation from user environment."
            _logger.warning(msg)
        self.config.collections_paths = collections_paths
303 |
    def load_collections(self) -> None:
        """Load collection data by querying ansible-galaxy for installed collections."""
        self.collections = OrderedDict()
        no_collections_msg = "None of the provided paths were usable"

        # do not use --path because it does not allow multiple values
        proc = self.run(
            [
                "ansible-galaxy",
                "collection",
                "list",
                "--format=json",
            ],
        )
        # ansible-galaxy exits with an options error when no collection roots
        # are usable at all; treat that as "no collections installed".
        if proc.returncode == RC_ANSIBLE_OPTIONS_ERROR and (
            no_collections_msg in proc.stdout or no_collections_msg in proc.stderr
        ):  # pragma: no cover
            _logger.debug("Ansible reported no installed collections at all.")
            return
        if proc.returncode != 0:
            _logger.error(proc)
            msg = f"Unable to list collections: {proc}"
            raise RuntimeError(msg)
        try:
            data = json.loads(proc.stdout)
        except json.decoder.JSONDecodeError as exc:
            msg = f"Unable to parse galaxy output as JSON: {proc.stdout}"
            raise RuntimeError(msg) from exc
        if not isinstance(data, dict):
            msg = f"Unexpected collection data, {data}"
            raise TypeError(msg)
        # Output maps each collection root path to {fqcn: {"version": ...}}.
        for path in data:
            if not isinstance(data[path], dict):
                msg = f"Unexpected collection data, {data[path]}"
                raise TypeError(msg)
            for collection, collection_info in data[path].items():
                if not isinstance(collection_info, dict):
                    msg = f"Unexpected collection data, {collection_info}"
                    raise TypeError(msg)

                # First occurrence wins; duplicates in later paths are only warned about.
                if collection in self.collections:
                    msg = f"Another version of '{collection}' {collection_info['version']} was found installed in {path}, only the first one will be used, {self.collections[collection].version} ({self.collections[collection].path})."
                    _logger.warning(msg)
                else:
                    self.collections[collection] = Collection(
                        name=collection,
                        version=collection_info["version"],
                        path=path,
                    )
353 |
354 | def _ensure_module_available(self) -> None:
355 | """Assure that Ansible Python module is installed and matching CLI version."""
356 | ansible_release_module = None
357 | with contextlib.suppress(ModuleNotFoundError, ImportError):
358 | ansible_release_module = importlib.import_module("ansible.release")
359 |
360 | if ansible_release_module is None:
361 | msg = "Unable to find Ansible python module."
362 | raise RuntimeError(msg)
363 |
364 | ansible_module_version = Version(
365 | ansible_release_module.__version__,
366 | )
367 | if ansible_module_version != self.version:
368 | msg = f"Ansible CLI ({self.version}) and python module ({ansible_module_version}) versions do not match. This indicates a broken execution environment."
369 | raise RuntimeError(msg)
370 |
371 | # We need to initialize the plugin loader
372 | # https://github.com/ansible/ansible-lint/issues/2945
373 | if not Runtime.initialized:
374 | col_path = [f"{self.cache_dir}/collections"]
375 | # noinspection PyProtectedMember
376 | # pylint: disable=import-outside-toplevel,no-name-in-module
377 | from ansible.plugins.loader import init_plugin_loader
378 | from ansible.utils.collection_loader._collection_finder import ( # pylint: disable=import-outside-toplevel
379 | _AnsibleCollectionFinder, # noqa: PLC2701
380 | )
381 |
382 | _AnsibleCollectionFinder( # noqa: SLF001
383 | paths=col_path,
384 | )._remove() # pylint: disable=protected-access
385 | init_plugin_loader(col_path)
386 |
    def clean(self) -> None:
        """Remove content of cache_dir.

        Errors are ignored, so a missing or partially removed cache directory
        is not treated as a failure.
        """
        shutil.rmtree(self.cache_dir, ignore_errors=True)
390 |
391 | def run( # ruff: disable=PLR0913
392 | self,
393 | args: str | list[str],
394 | *,
395 | retry: bool = False,
396 | tee: bool = False,
397 | env: dict[str, str] | None = None,
398 | cwd: Path | None = None,
399 | set_acp: bool = True,
400 | ) -> CompletedProcess:
401 | """Execute a command inside an Ansible environment.
402 |
403 | :param retry: Retry network operations on failures.
404 | :param tee: Also pass captured stdout/stderr to system while running.
405 | :param set_acp: Set the ANSIBLE_COLLECTIONS_PATH
406 | """
407 | if tee:
408 | run_func: Callable[..., CompletedProcess] = subprocess_tee.run
409 | else:
410 | run_func = subprocess.run
411 | env = self.environ if env is None else env.copy()
412 | # Presence of ansible debug variable or config option will prevent us
413 | # from parsing its JSON output due to extra debug messages on stdout.
414 | env["ANSIBLE_DEBUG"] = "0"
415 |
416 | # https://github.com/ansible/ansible-lint/issues/3522
417 | env["ANSIBLE_VERBOSE_TO_STDERR"] = "True"
418 |
419 | if set_acp:
420 | env["ANSIBLE_COLLECTIONS_PATH"] = ":".join(
421 | list(dict.fromkeys(self.config.collections_paths)),
422 | )
423 |
424 | for _ in range(self.max_retries + 1 if retry else 1):
425 | result = run_func(
426 | args,
427 | universal_newlines=True,
428 | check=False,
429 | stdout=subprocess.PIPE,
430 | stderr=subprocess.PIPE,
431 | env=env,
432 | cwd=str(cwd) if cwd else None,
433 | )
434 | if result.returncode == 0:
435 | break
436 | _logger.debug("Environment: %s", env)
437 | if retry:
438 | _logger.warning(
439 | "Retrying execution failure %s of: %s",
440 | result.returncode,
441 | " ".join(args),
442 | )
443 | return result
444 |
445 | @property
446 | def version(self) -> Version:
447 | """Return current Version object for Ansible.
448 |
449 | If version is not mentioned, it returns current version as detected.
450 | When version argument is mentioned, it return converts the version string
451 | to Version object in order to make it usable in comparisons.
452 | """
453 | if self._version:
454 | return self._version
455 |
456 | proc = self.run(["ansible", "--version"])
457 | if proc.returncode == 0:
458 | self._version = parse_ansible_version(proc.stdout)
459 | return self._version
460 |
461 | msg = "Unable to find a working copy of ansible executable."
462 | raise MissingAnsibleError(msg, proc=proc)
463 |
464 | def version_in_range(
465 | self,
466 | lower: str | None = None,
467 | upper: str | None = None,
468 | ) -> bool:
469 | """Check if Ansible version is inside a required range.
470 |
471 | The lower limit is inclusive and the upper one exclusive.
472 | """
473 | if lower and self.version < Version(lower):
474 | return False
475 | return not (upper and self.version >= Version(upper))
476 |
477 | def has_playbook(self, playbook: str, *, basedir: Path | None = None) -> bool:
478 | """Return true if ansible can load a given playbook.
479 |
480 | This is also used for checking if playbooks from within collections
481 | are present and if they pass syntax check.
482 | """
483 | if (playbook, basedir) in self._has_playbook_cache:
484 | return self._has_playbook_cache[playbook, basedir]
485 |
486 | proc = self.run(["ansible-playbook", "--syntax-check", playbook], cwd=basedir)
487 | result = proc.returncode == 0
488 | if not result:
489 | if not basedir:
490 | basedir = Path()
491 | msg = f"has_playbook returned false for '{basedir / playbook}' due to syntax check returning {proc.returncode}"
492 | _logger.debug(msg)
493 |
494 | # cache the result
495 | self._has_playbook_cache[playbook, basedir] = result
496 |
497 | return result
498 |
    def install_collection(
        self,
        collection: str | Path,
        *,
        destination: Path | None = None,
        force: bool = False,
    ) -> None:
        """Install an Ansible collection.

        Can accept arguments like:
        'foo.bar:>=1.2.3'
        'git+https://github.com/ansible-collections/ansible.posix.git,main'

        :param collection: Collection requirement string or on-disk path.
        :param destination: Optional install location, prepended to the
            collection search paths.
        :param force: Pass ``--force`` to ansible-galaxy.
        :raises InvalidPrerequisiteError: If the install command fails.
        """
        cmd = [
            "ansible-galaxy",
            "collection",
            "install",
            "-vvv",  # this is needed to make ansible display important info in case of failures
        ]
        if force:
            cmd.append("--force")

        if isinstance(collection, Path):
            collection = str(collection)
        # As ansible-galaxy install is not able to automatically determine
        # if the range requires a pre-release, we need to manually add the --pre
        # flag when needed.
        matches = version_re.search(collection)

        if (
            not is_url(collection)
            and matches
            and CollectionVersion(matches[1]).is_prerelease
        ):
            cmd.append("--pre")

        # NOTE(review): cpaths aliases (does not copy) config.collections_paths,
        # so the insert below also mutates the shared config list and persists
        # past this call — confirm this is intentional.
        cpaths: list[str] = self.config.collections_paths
        if destination and str(destination) not in cpaths:
            # we cannot use '-p' because it breaks galaxy ability to ignore already installed collections, so
            # we hack ANSIBLE_COLLECTIONS_PATH instead and inject our own path there.
            # pylint: disable=no-member
            cpaths.insert(0, str(destination))
        cmd.append(f"{collection}")

        _logger.info("Running from %s : %s", Path.cwd(), " ".join(cmd))
        process = self.run(
            cmd,
            retry=True,
            env={**self.environ, "ANSIBLE_COLLECTIONS_PATH": ":".join(cpaths)},
        )
        if process.returncode != 0:
            msg = f"Command {' '.join(cmd)}, returned {process.returncode} code:\n{process.stdout}\n{process.stderr}"
            _logger.error(msg)
            raise InvalidPrerequisiteError(msg)
553 |
    def install_collection_from_disk(
        self,
        path: Path,
        destination: Path | None = None,
    ) -> None:
        """Build and install collection from a given disk path.

        :param path: Collection source directory.
        :param destination: Optional install location.
        """
        # force=True makes ansible-galaxy reinstall even when a copy of the
        # collection is already present at the destination.
        self.install_collection(path, destination=destination, force=True)
561 |
    # pylint: disable=too-many-branches
    def install_requirements(  # noqa: C901
        self,
        requirement: Path,
        *,
        retry: bool = False,
        offline: bool = False,
    ) -> None:
        """Install dependencies from a requirements.yml.

        :param requirement: path to requirements.yml file
        :param retry: retry network operations on failures
        :param offline: bypass installation, may fail if requirements are not met.
        :raises InvalidPrerequisiteError: if the file is not a valid
            requirements file.
        :raises AnsibleCommandError: if an ansible-galaxy install fails.
        """
        if not Path(requirement).exists():
            return
        reqs_yaml = yaml_from_file(Path(requirement))
        # A requirements file is either a bare list (v1 roles format) or a
        # mapping with "roles" and/or "collections" keys (v2 format).
        if not isinstance(reqs_yaml, dict | list):
            msg = f"{requirement} file is not a valid Ansible requirements file."
            raise InvalidPrerequisiteError(msg)

        if isinstance(reqs_yaml, dict):
            for key in reqs_yaml:
                if key not in {"roles", "collections"}:
                    msg = f"{requirement} file is not a valid Ansible requirements file. Only 'roles' and 'collections' keys are allowed at root level. Recognized valid locations are: {', '.join(REQUIREMENT_LOCATIONS)}"
                    raise InvalidPrerequisiteError(msg)

        # v1 files (bare list) and the "roles" key are handled by
        # ansible-galaxy role install.
        if isinstance(reqs_yaml, list) or "roles" in reqs_yaml:
            cmd = [
                "ansible-galaxy",
                "role",
                "install",
                "-r",
                f"{requirement}",
            ]
            if self.verbosity > 0:
                cmd.extend(["-" + ("v" * self.verbosity)])
            cmd.extend(["--roles-path", f"{self.cache_dir}/roles"])

            if offline:
                _logger.warning(
                    "Skipped installing old role dependencies due to running in offline mode.",
                )
            else:
                _logger.info("Running %s", " ".join(cmd))

                result = self.run(cmd, retry=retry)
                _logger.debug(result.stdout)
                if result.returncode != 0:
                    _logger.error(result.stderr)
                    raise AnsibleCommandError(result)

        # Run galaxy collection install works on v2 requirements.yml
        if (
            isinstance(reqs_yaml, dict)
            and "collections" in reqs_yaml
            and reqs_yaml["collections"] is not None
        ):
            cmd = [
                "ansible-galaxy",
                "collection",
                "install",
            ]
            if self.verbosity > 0:
                cmd.extend(["-" + ("v" * self.verbosity)])

            # A single git-sourced collection forces --pre for the whole install.
            for collection in reqs_yaml["collections"]:
                if isinstance(collection, dict) and collection.get("type", "") == "git":
                    _logger.info(
                        "Adding '--pre' to ansible-galaxy collection install because we detected one collection being sourced from git.",
                    )
                    cmd.append("--pre")
                    break
            if offline:
                _logger.warning(
                    "Skipped installing collection dependencies due to running in offline mode.",
                )
            else:
                cmd.extend(["-r", str(requirement)])
                _logger.info("Running %s", " ".join(cmd))
                result = self.run(
                    cmd,
                    retry=retry,
                )
                _logger.debug(result.stdout)
                if result.returncode != 0:
                    _logger.error(result.stderr)
                    raise AnsibleCommandError(result)
                # Newly installed collections invalidate the plugin loader
                # state, so force re-initialization when modules are required.
                if self.require_module:
                    Runtime.initialized = False
                    self._ensure_module_available()
653 |
    # pylint: disable=too-many-locals
    def prepare_environment(  # noqa: C901
        self,
        required_collections: dict[str, str] | None = None,
        *,
        retry: bool = False,
        install_local: bool = False,
        offline: bool = False,
        role_name_check: int = 0,
    ) -> None:
        """Make dependencies available if needed.

        :param required_collections: Mapping of collection name to minimal
            required version that must be installed.
        :param retry: Retry network operations on failures.
        :param install_local: Also install the collection or role found in the
            project directory itself.
        :param offline: Skip installation steps that require network access.
        :param role_name_check: Role name validation mode, forwarded to
            ``_install_galaxy_role``.
        """
        destination: Path = self.cache_dir / "collections"
        if required_collections is None:
            required_collections = {}

        self._prepare_ansible_paths()
        # first one is standard for collection layout repos and the last two
        # are part of Tower specification
        # https://docs.ansible.com/ansible-tower/latest/html/userguide/projects.html#ansible-galaxy-support
        # https://docs.ansible.com/ansible-tower/latest/html/userguide/projects.html#collections-support
        for req_file in REQUIREMENT_LOCATIONS:
            file_path = Path(req_file)
            if self.project_dir:
                file_path = self.project_dir / req_file
            self.install_requirements(file_path, retry=retry, offline=offline)

        if not install_local:
            return

        for item in search_galaxy_paths(self.project_dir):
            # processing all found galaxy.yml files
            if item.exists():
                data = yaml_from_file(item)
                if isinstance(data, dict) and "dependencies" in data:
                    for name, required_version in data["dependencies"].items():
                        _logger.info(
                            "Provisioning collection %s:%s from galaxy.yml",
                            name,
                            required_version,
                        )
                        # git URLs use "," as version separator, names use ":"
                        self.install_collection(
                            f"{name}{',' if is_url(name) else ':'}{required_version}",
                            destination=destination,
                        )

        for name, min_version in required_collections.items():
            self.install_collection(
                f"{name}:>={min_version}",
                destination=destination,
            )

        galaxy_path = self.project_dir / "galaxy.yml"
        if (galaxy_path).exists():
            # while function can return None, that would not break the logic
            colpath = Path(
                f"{destination}/ansible_collections/{colpath_from_path(self.project_dir)}",
            )
            if colpath.is_symlink():
                if os.path.realpath(colpath) == str(Path.cwd()):
                    _logger.warning(
                        "Found symlinked collection, skipping its installation.",
                    )
                    return
                _logger.warning(
                    "Collection is symlinked, but not pointing to %s directory, so we will remove it.",
                    Path.cwd(),
                )
                colpath.unlink()

            # molecule scenario within a collection
            self.install_collection_from_disk(
                galaxy_path.parent,
                destination=destination,
            )
        elif Path.cwd().parent.name == "roles" and Path("../../galaxy.yml").exists():
            # molecule scenario located within roles/<role>/molecule inside
            # a collection
            self.install_collection_from_disk(
                Path("../.."),
                destination=destination,
            )
        else:
            # no collection, try to recognize and install a standalone role
            self._install_galaxy_role(
                self.project_dir,
                role_name_check=role_name_check,
                ignore_errors=True,
            )
        # reload collections
        self.load_collections()
744 |
745 | def require_collection(
746 | self,
747 | name: str,
748 | version: str | None = None,
749 | *,
750 | install: bool = True,
751 | ) -> tuple[CollectionVersion, Path]:
752 | """Check if a minimal collection version is present or exits.
753 |
754 | In the future this method may attempt to install a missing or outdated
755 | collection before failing.
756 |
757 | Args:
758 | name: collection name
759 | version: minimal version required
760 | install: if True, attempt to install a missing collection
761 |
762 | Returns:
763 | tuple of (found_version, collection_path)
764 | """
765 | try:
766 | ns, coll = name.split(".", 1)
767 | except ValueError as exc:
768 | msg = f"Invalid collection name supplied: {name}%s"
769 | raise InvalidPrerequisiteError(
770 | msg,
771 | ) from exc
772 |
773 | paths: list[str] = self.config.collections_paths
774 | if not paths or not isinstance(paths, list):
775 | msg = f"Unable to determine ansible collection paths. ({paths})"
776 | raise InvalidPrerequisiteError(
777 | msg,
778 | )
779 |
780 | for path in paths:
781 | collpath = Path(path) / "ansible_collections" / ns / coll
782 | if collpath.exists():
783 | mpath = collpath / "MANIFEST.json"
784 | if not mpath.exists():
785 | msg = f"Found collection at '{collpath}' but missing MANIFEST.json, cannot get info."
786 | _logger.fatal(msg)
787 | raise InvalidPrerequisiteError(msg)
788 |
789 | with mpath.open(encoding="utf-8") as f:
790 | manifest = json.loads(f.read())
791 | found_version = CollectionVersion(
792 | manifest["collection_info"]["version"],
793 | )
794 | if version and found_version < CollectionVersion(version):
795 | if install:
796 | self.install_collection(f"{name}:>={version}")
797 | self.require_collection(name, version, install=False)
798 | else:
799 | msg = f"Found {name} collection {found_version} but {version} or newer is required."
800 | _logger.fatal(msg)
801 | raise InvalidPrerequisiteError(msg)
802 | return found_version, collpath.resolve()
803 | if install:
804 | self.install_collection(f"{name}:>={version}" if version else name)
805 | return self.require_collection(
806 | name=name,
807 | version=version,
808 | install=False,
809 | )
810 | msg = f"Collection '{name}' not found in '{paths}'"
811 | _logger.fatal(msg)
812 | raise InvalidPrerequisiteError(msg)
813 |
    def _prepare_ansible_paths(self) -> None:
        """Configure Ansible environment variables.

        Prepends project-relative and cache directories to the module, roles
        and collections search paths and exports them through ANSIBLE_LIBRARY,
        ANSIBLE_ROLES_PATH and ANSIBLE_COLLECTIONS_PATH when changed.
        """
        try:
            library_paths: list[str] = self.config.default_module_path.copy()
            roles_path: list[str] = self.config.default_roles_path.copy()
            collections_path: list[str] = self.config.collections_paths.copy()
        except AttributeError as exc:
            msg = "Unexpected ansible configuration"
            raise RuntimeError(msg) from exc

        # Each entry is (target list, path, must_be_present): paths flagged
        # must_be_present are only added when they already exist on disk.
        alterations_list: list[tuple[list[str], str, bool]] = [
            (library_paths, "plugins/modules", True),
            (roles_path, "roles", True),
        ]

        # Isolated mode also injects cache-specific directories, which get
        # created on demand instead of being required to exist.
        alterations_list.extend(
            (
                [
                    (roles_path, f"{self.cache_dir}/roles", False),
                    (library_paths, f"{self.cache_dir}/modules", False),
                    (collections_path, f"{self.cache_dir}/collections", False),
                ]
                if self.isolated
                else []
            ),
        )

        for path_list, path_, must_be_present in alterations_list:
            path = Path(path_)
            if not path.exists():
                if must_be_present:
                    continue
                path.mkdir(parents=True, exist_ok=True)
            if str(path) not in path_list:
                path_list.insert(0, str(path))

        # NOTE(review): this reads DEFAULT_MODULE_PATH (upper case) while the
        # other comparisons use lower-case attributes; AnsibleConfig appears
        # to resolve both spellings — confirm before normalizing.
        if library_paths != self.config.DEFAULT_MODULE_PATH:
            self._update_env("ANSIBLE_LIBRARY", library_paths)
        if collections_path != self.config.default_collections_path:
            self._update_env("ANSIBLE_COLLECTIONS_PATH", collections_path)
        if roles_path != self.config.default_roles_path:
            self._update_env("ANSIBLE_ROLES_PATH", roles_path)
856 |
857 | def _get_roles_path(self) -> Path:
858 | """Return roles installation path.
859 |
860 | If `self.isolated` is set to `True`, `self.cache_dir` would be
861 | created, then it returns the `self.cache_dir/roles`. When `self.isolated` is
862 | not mentioned or set to `False`, it returns the first path in
863 | `default_roles_path`.
864 | """
865 | path = Path(f"{self.cache_dir}/roles")
866 | return path
867 |
    def _install_galaxy_role(
        self,
        project_dir: Path,
        role_name_check: int = 0,
        *,
        ignore_errors: bool = False,
    ) -> None:
        """Detect standalone galaxy role and installs it.

        Args:
            project_dir: path to the role
            role_name_check: logic used to check role name
                0: exit with error if name is not compliant (default)
                1: warn if name is not compliant
                2: bypass any name checking

            ignore_errors: if True, bypass installing invalid roles.

        Our implementation aims to match ansible-galaxy's behavior for installing
        roles from a tarball or scm. For example ansible-galaxy will install a role
        that has both galaxy.yml and meta/main.yml present but empty. Also missing
        galaxy.yml is accepted but missing meta/main.yml is not.
        """
        yaml = None
        galaxy_info = {}

        # Find the first existing meta/main.yml variant; the for/else branch
        # runs only when none of the candidates exists.
        for meta_main in META_MAIN:
            meta_filename = Path(project_dir) / meta_main

            if meta_filename.exists():
                break
        else:
            if ignore_errors:
                return
        # NOTE(review): when no meta file exists and ignore_errors is False we
        # fall through with the last candidate path, so yaml_from_file below
        # fails on the missing file — confirm this is the intended error path.

        yaml = yaml_from_file(meta_filename)

        if yaml and "galaxy_info" in yaml:
            galaxy_info = yaml["galaxy_info"]

        fqrn = _get_role_fqrn(galaxy_info, project_dir)

        if role_name_check in {0, 1}:
            if not re.match(r"[a-z0-9][a-z0-9_-]+\.[a-z][a-z0-9_]+$", fqrn):
                msg = MSG_INVALID_FQRL.format(fqrn)
                if role_name_check == 1:
                    _logger.warning(msg)
                else:
                    _logger.error(msg)
                    raise InvalidPrerequisiteError(msg)
        elif "role_name" in galaxy_info:
            # when 'role-name' is in skip_list, we stick to plain role names
            role_namespace = _get_galaxy_role_ns(galaxy_info)
            role_name = _get_galaxy_role_name(galaxy_info)
            fqrn = f"{role_namespace}{role_name}"
        else:
            fqrn = Path(project_dir).absolute().name
        path = self._get_roles_path()
        path.mkdir(parents=True, exist_ok=True)
        link_path = path / fqrn
        # despite documentation stating that is_file() reports true for symlinks,
        # it appears that is_dir() reports true instead, so we rely on exists().
        target = Path(project_dir).absolute()
        if not link_path.exists() or (
            link_path.is_symlink() and link_path.readlink() != target
        ):
            # must call unlink before checking exists because a broken
            # link reports as not existing and we want to repair it
            link_path.unlink(missing_ok=True)
            # https://github.com/python/cpython/issues/73843
            link_path.symlink_to(str(target), target_is_directory=True)
        _logger.info(
            "Using %s symlink to current repository in order to enable Ansible to find the role using its expected full name.",
            link_path,
        )
943 |
944 | def _update_env(self, varname: str, value: list[str], default: str = "") -> None:
945 | """Update colon based environment variable if needed.
946 |
947 | New values are prepended to make sure they take precedence.
948 | """
949 | if not value:
950 | return
951 | orig_value = self.environ.get(varname, default)
952 | if orig_value:
953 | # we just want to avoid repeating the same entries, but order is important
954 | value = list(dict.fromkeys([*value, *orig_value.split(":")]))
955 | value_str = ":".join(value)
956 | if value_str != self.environ.get(varname, ""):
957 | self.environ[varname] = value_str
958 | _logger.info("Set %s=%s", varname, value_str)
959 |
960 |
def _get_role_fqrn(galaxy_infos: dict[str, Any], project_dir: Path) -> str:
    """Compute role fqrn."""
    namespace = _get_galaxy_role_ns(galaxy_infos)
    name = _get_galaxy_role_name(galaxy_infos)

    if not name:
        # Fall back to the project directory name, stripping common
        # "ansible-"/"ansible-role-" prefixes and keeping only the last
        # dot-separated component.
        dir_name = Path(project_dir).absolute().name
        name = re.sub(r"(ansible-|ansible-role-)", "", dir_name).split(
            ".",
            maxsplit=2,
        )[-1]

    return f"{namespace}{name}"
974 |
975 |
976 | def _get_galaxy_role_ns(galaxy_infos: dict[str, Any]) -> str:
977 | """Compute role namespace from meta/main.yml, including trailing dot."""
978 | role_namespace = galaxy_infos.get("namespace", "")
979 | if len(role_namespace) == 0:
980 | role_namespace = galaxy_infos.get("author", "")
981 | if not isinstance(role_namespace, str):
982 | msg = f"Role namespace must be string, not {role_namespace}"
983 | raise AnsibleCompatError(msg)
984 | # if there's a space in the name space, it's likely author name
985 | # and not the galaxy login, so act as if there was no namespace
986 | if not role_namespace or re.match(r"^\w+ \w+", role_namespace):
987 | role_namespace = ""
988 | else:
989 | role_namespace = f"{role_namespace}."
990 | return role_namespace
991 |
992 |
993 | def _get_galaxy_role_name(galaxy_infos: dict[str, Any]) -> str:
994 | """Compute role name from meta/main.yml."""
995 | result = galaxy_infos.get("role_name", "")
996 | if not isinstance(result, str):
997 | return ""
998 | return result
999 |
1000 |
def search_galaxy_paths(search_dir: Path) -> list[Path]:
    """Search for galaxy paths (only one level deep).

    Returns:
        list[Path]: List of galaxy.yml found.
    """
    found: list[Path] = []
    for entry in [Path(), *search_dir.iterdir()]:
        # We ignore any folders that are not valid namespaces, just like
        # ansible galaxy does at this moment.
        candidate = entry.resolve()
        if candidate.is_file():
            if candidate.name == "galaxy.yml":
                found.append(candidate)
        elif candidate.is_dir() and namespace_re.match(candidate.name):
            galaxy_file = candidate / "galaxy.yml"
            if galaxy_file.exists():
                found.append(galaxy_file)
    return found
1020 |
1021 |
def is_url(name: str) -> bool:
    """Return True if a dependency name looks like an URL."""
    # Git dependencies are referenced as either git+<url> or git@<host>,
    # which is exactly what the original regex ^git[+@] matched.
    return name.startswith(("git+", "git@"))
1025 |
--------------------------------------------------------------------------------
/src/ansible_compat/schema.py:
--------------------------------------------------------------------------------
1 | """Utils for JSON Schema validation."""
2 |
3 | from __future__ import annotations
4 |
5 | import json
6 | from collections.abc import Mapping, Sequence
7 | from dataclasses import dataclass
8 | from typing import TYPE_CHECKING
9 |
10 | import jsonschema
11 | from jsonschema.validators import validator_for
12 |
13 | if TYPE_CHECKING: # pragma: no cover
14 | from ansible_compat.types import JSON
15 |
16 |
def to_path(schema_path: Sequence[str | int]) -> str:
    """Flatten a path to a dot delimited string.

    Args:
        schema_path: The schema path

    Returns:
        The dot delimited path
    """
    return ".".join(map(str, schema_path))
27 |
28 |
def json_path(absolute_path: Sequence[str | int]) -> str:
    """Flatten a data path to a dot delimited string.

    Args:
        absolute_path: The path

    Returns:
        The dot delimited string
    """
    # Integers are list indices ([i]); everything else is a mapping key (.key).
    parts = ["$"]
    for element in absolute_path:
        if isinstance(element, int):
            parts.append(f"[{element}]")
        else:
            parts.append(f".{element}")
    return "".join(parts)
45 |
46 |
@dataclass(order=True)
class JsonSchemaError:
    # pylint: disable=too-many-instance-attributes
    """Data structure to hold a json schema validation error."""

    # order of attributes below is important for sorting
    # (dataclass order=True compares fields in declaration order)
    schema_path: str
    data_path: str
    json_path: str
    message: str
    expected: bool | int | str
    relative_schema: str
    validator: str
    found: str

    def to_friendly(self) -> str:
        """Provide a friendly explanation of the error.

        Returns:
            The error message, prefixed with the data path it refers to.
        """
        return f"In '{self.data_path}': {self.message}."
68 |
69 |
def validate(
    schema: JSON,
    data: JSON,
) -> list[JsonSchemaError]:
    """Validate some data against a JSON schema.

    Args:
        schema: the JSON schema to use for validation
        data: The data to validate

    Returns:
        Any errors encountered

    Raises:
        jsonschema.SchemaError: if the schema is invalid
    """
    findings: list[JsonSchemaError] = []

    # A schema given as a string is assumed to be serialized JSON.
    if isinstance(schema, str):
        schema = json.loads(schema)
    try:
        if not isinstance(schema, Mapping):
            msg = "Invalid schema, must be a mapping"
            raise jsonschema.SchemaError(msg)  # noqa: TRY301
        validator = validator_for(schema)
        validator.check_schema(schema)
    except jsonschema.SchemaError as exc:
        # A broken schema is reported as a single synthetic error entry.
        findings.append(
            JsonSchemaError(
                message=str(exc),
                data_path="schema sanity check",
                json_path="",
                schema_path="",
                relative_schema="",
                expected="",
                validator="",
                found="",
            ),
        )
        return findings

    for found_error in validator(schema).iter_errors(data):
        if isinstance(found_error, jsonschema.ValidationError):
            findings.append(
                JsonSchemaError(
                    message=found_error.message,
                    data_path=to_path(found_error.absolute_path),
                    json_path=json_path(found_error.absolute_path),
                    schema_path=to_path(found_error.schema_path),
                    relative_schema=str(found_error.schema),
                    expected=str(found_error.validator_value),
                    validator=str(found_error.validator),
                    found=str(found_error.instance),
                ),
            )
    return sorted(findings)
124 |
--------------------------------------------------------------------------------
/src/ansible_compat/types.py:
--------------------------------------------------------------------------------
1 | """Custom types."""
2 |
3 | from __future__ import annotations
4 |
5 | from collections.abc import Mapping, Sequence
6 | from typing import TypeAlias
7 |
# Recursive alias describing any value representable as JSON (mutable form).
JSON: TypeAlias = dict[str, "JSON"] | list["JSON"] | str | int | float | bool | None
# Read-only counterpart of JSON, built on abstract Mapping/Sequence so callers
# cannot mutate the structure through this type.
JSON_ro: TypeAlias = (
    Mapping[str, "JSON_ro"] | Sequence["JSON_ro"] | str | int | float | bool | None
)

__all__ = ["JSON", "JSON_ro"]
14 |
--------------------------------------------------------------------------------
/test/__init__.py:
--------------------------------------------------------------------------------
1 | """Tests for ansible_compat package."""
2 |
--------------------------------------------------------------------------------
/test/assets/galaxy_paths/.bar/galaxy.yml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ansible/ansible-compat/af6d5a8fed201502be3a4574072a4ebba857e015/test/assets/galaxy_paths/.bar/galaxy.yml
--------------------------------------------------------------------------------
/test/assets/galaxy_paths/foo/galaxy.yml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ansible/ansible-compat/af6d5a8fed201502be3a4574072a4ebba857e015/test/assets/galaxy_paths/foo/galaxy.yml
--------------------------------------------------------------------------------
/test/assets/requirements-invalid-collection.yml:
--------------------------------------------------------------------------------
1 | # "ansible-galaxy collection install" is expected to fail this invalid file
2 | collections:
3 | - foo: bar
4 |
--------------------------------------------------------------------------------
/test/assets/requirements-invalid-role.yml:
--------------------------------------------------------------------------------
1 | # file expected to make "ansible-galaxy role install" to fail
2 | roles:
3 | - this_role_does_not_exist
4 |
--------------------------------------------------------------------------------
/test/assets/validate0_data.json:
--------------------------------------------------------------------------------
1 | { "environment": { "a": false, "b": true, "c": "foo" } }
2 |
--------------------------------------------------------------------------------
/test/assets/validate0_expected.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "message": "False is not of type 'string'",
4 | "data_path": "environment.a",
5 | "json_path": "$.environment.a",
6 | "schema_path": "properties.environment.additionalProperties.type",
7 | "relative_schema": "{'type': 'string'}",
8 | "expected": "string",
9 | "validator": "type",
10 | "found": "False"
11 | },
12 | {
13 | "message": "True is not of type 'string'",
14 | "data_path": "environment.b",
15 | "json_path": "$.environment.b",
16 | "schema_path": "properties.environment.additionalProperties.type",
17 | "relative_schema": "{'type': 'string'}",
18 | "expected": "string",
19 | "validator": "type",
20 | "found": "True"
21 | }
22 | ]
23 |
--------------------------------------------------------------------------------
/test/assets/validate0_schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-07/schema#",
3 | "properties": {
4 | "environment": {
5 | "type": "object",
6 | "additionalProperties": { "type": "string" }
7 | }
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/test/collections/acme.broken/galaxy.yml:
--------------------------------------------------------------------------------
1 | foo: that is not a valid collection!
2 |
--------------------------------------------------------------------------------
/test/collections/acme.goodies/galaxy.yml:
--------------------------------------------------------------------------------
1 | name: goodies
2 | namespace: acme
3 | version: 1.0.0
4 | readme: README.md
5 | authors:
6 | - Red Hat
7 | description: Sample collection to use with molecule
8 | dependencies:
    community.molecule: ">=0.1.0" # used to also test '>=' condition
10 | ansible.utils: "*" # used to also test '*'
11 | git+https://github.com/ansible-collections/community.crypto.git: main # tests ability to install from git
12 | build_ignore:
13 | - "*.egg-info"
14 | - .DS_Store
15 | - .eggs
16 | - .gitignore
17 | - .mypy_cache
18 | - .pytest_cache
19 | - .stestr
20 | - .stestr.conf
21 | - .tox
22 | - .vscode
23 | - MANIFEST.in
24 | - build
25 | - dist
26 | - doc
27 | - report.html
28 | - setup.cfg
29 | - setup.py
30 | - "tests/unit/*.*"
31 | - README.rst
32 | - tox.ini
33 |
34 | license_file: LICENSE
35 |
--------------------------------------------------------------------------------
/test/collections/acme.goodies/molecule/default/converge.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Converge
3 | hosts: localhost
4 | tasks:
5 | - name: "Include sample role from current collection"
6 | include_role:
7 | name: acme.goodies.baz
8 |
--------------------------------------------------------------------------------
/test/collections/acme.goodies/molecule/default/molecule.yml:
--------------------------------------------------------------------------------
1 | ---
2 | dependency:
3 | name: galaxy
4 | driver:
5 | name: delegated
6 | platforms:
7 | - name: instance
8 | provisioner:
9 | name: ansible
10 | verifier:
11 | name: ansible
12 |
--------------------------------------------------------------------------------
/test/collections/acme.goodies/roles/baz/molecule/deep_scenario/converge.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Converge
3 | hosts: localhost
4 | tasks:
5 | - name: "Sample testing task part of deep_scenario"
6 | include_role:
7 | name: acme.goodies.baz
8 |
--------------------------------------------------------------------------------
/test/collections/acme.goodies/roles/baz/molecule/deep_scenario/molecule.yml:
--------------------------------------------------------------------------------
1 | ---
2 | dependency:
3 | name: galaxy
4 | driver:
5 | name: delegated
6 | platforms:
7 | - name: instance
8 | provisioner:
9 | name: ansible
10 | verifier:
11 | name: ansible
12 |
--------------------------------------------------------------------------------
/test/collections/acme.goodies/roles/baz/tasks/main.yml:
--------------------------------------------------------------------------------
1 | - name: "some task inside foo.bar collection"
2 | debug:
3 | msg: "hello world!"
4 |
--------------------------------------------------------------------------------
/test/collections/acme.goodies/tests/requirements.yml:
--------------------------------------------------------------------------------
1 | collections:
2 | - name: ansible.posix
3 | version: "1.5.4"
4 |
--------------------------------------------------------------------------------
/test/collections/acme.minimal/galaxy.yml:
--------------------------------------------------------------------------------
1 | name: minimal
2 | namespace: acme
3 | version: 1.0.0
4 | readme: README.md
5 | authors:
6 | - Red Hat
7 | description: Sample collection to use with molecule
8 | build_ignore:
9 | - "*.egg-info"
10 | - .DS_Store
11 | - .eggs
12 | - .gitignore
13 | - .mypy_cache
14 | - .pytest_cache
15 | - .stestr
16 | - .stestr.conf
17 | - .tox
18 | - .vscode
19 | - MANIFEST.in
20 | - build
21 | - dist
22 | - doc
23 | - report.html
24 | - setup.cfg
25 | - setup.py
26 | - "tests/unit/*.*"
27 | - README.rst
28 | - tox.ini
29 |
30 | license_file: LICENSE
31 |
--------------------------------------------------------------------------------
/test/conftest.py:
--------------------------------------------------------------------------------
1 | """Pytest fixtures."""
2 |
3 | import importlib.metadata
4 | import json
5 | import pathlib
6 | import subprocess
7 | import sys
8 | from collections.abc import Callable, Generator
9 | from pathlib import Path
10 |
11 | import pytest
12 |
13 | from ansible_compat.runtime import Runtime
14 |
15 |
@pytest.fixture
# pylint: disable=unused-argument
def runtime(scope: str = "session") -> Generator[Runtime, None, None]:  # noqa: ARG001
    """Yield an isolated Runtime instance, cleaning it up afterwards."""
    rt = Runtime(isolated=True)
    yield rt
    rt.clean()
23 |
24 |
@pytest.fixture
# pylint: disable=unused-argument
def runtime_tmp(
    tmp_path: pathlib.Path,
    scope: str = "session",  # noqa: ARG001
) -> Generator[Runtime, None, None]:
    """Yield an isolated Runtime rooted in a temp directory, cleaned up afterwards."""
    rt = Runtime(project_dir=tmp_path, isolated=True)
    yield rt
    rt.clean()
35 |
36 |
def query_pkg_version(pkg: str) -> str:
    """Get the version of a currently installed package.

    :param pkg: Package name
    :return: Package version
    """
    version = importlib.metadata.version(pkg)
    return version
44 |
45 |
@pytest.fixture
def pkg_version() -> Callable[[str], str]:
    """Expose query_pkg_version as a fixture.

    :return: Callable function to get package version
    """
    return query_pkg_version
53 |
54 |
class VirtualEnvironment:
    """Helper wrapper around a python virtual environment."""

    def __init__(self, path: Path) -> None:
        """Initialize.

        :param path: Project directory in which the "venv" folder is created
        """
        self.project = path
        self.venv_path = self.project / "venv"
        self.venv_bin_path = self.venv_path / "bin"
        self.venv_python_path = self.venv_bin_path / "python"

    def create(self) -> None:
        """Create the virtualenv and install this package into it."""
        cmd = [str(sys.executable), "-m", "venv", str(self.venv_path)]
        subprocess.check_call(args=cmd)
        # Install this package into the virtual environment
        self.install(f"{__file__}/../..")

    def install(self, *packages: str) -> None:
        """Install packages in virtualenv.

        :param packages: Packages to install
        """
        cmd = [str(self.venv_python_path), "-m", "pip", "install", *packages]
        subprocess.check_call(args=cmd)

    def python_script_run(self, script: str) -> subprocess.CompletedProcess[str]:
        """Run a python snippet in the project dir using the venv interpreter.

        :param script: Python source to execute via ``python -c``
        :return: Completed process with captured text output
        """
        # Fixed: docstring previously documented a nonexistent "args" parameter.
        return subprocess.run(
            args=[self.venv_python_path, "-c", script],
            capture_output=True,
            cwd=self.project,
            check=False,
            text=True,
        )

    def site_package_dirs(self) -> list[Path]:
        """Get site packages of the virtual environment.

        :return: List of unique, resolved site packages dirs
        :raises TypeError: If the interpreter did not report a list of paths
        """
        script = "import json, site; print(json.dumps(site.getsitepackages()))"
        proc = subprocess.run(
            args=[self.venv_python_path, "-c", script],
            capture_output=True,
            check=False,
            text=True,
        )
        dirs = json.loads(proc.stdout)
        if not isinstance(dirs, list):
            msg = "Expected list of site packages"
            raise TypeError(msg)
        # Resolve and deduplicate (set) before returning as a list.
        return list({Path(d).resolve() for d in dirs})
115 |
116 |
@pytest.fixture(scope="module")
def venv_module(tmp_path_factory: pytest.TempPathFactory) -> VirtualEnvironment:
    """Create a virtualenv in a temporary directory (shared per test module).

    :param tmp_path_factory: pytest factory fixture used to create the temp project dir
    :return: VirtualEnvironment instance
    """
    test_project = tmp_path_factory.mktemp(basename="test_project-", numbered=True)
    venv_ = VirtualEnvironment(test_project)
    venv_.create()
    return venv_
128 |
--------------------------------------------------------------------------------
/test/roles/acme.missing_deps/meta/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | galaxy_info:
3 | name: missing_deps
4 | namespace: acme
5 | description: foo
6 | license: GPL
7 | min_ansible_version: "2.10"
8 | platforms: []
9 |
--------------------------------------------------------------------------------
/test/roles/acme.missing_deps/requirements.yml:
--------------------------------------------------------------------------------
1 | collections:
2 | - foo.bar # collection that does not exist, so we can test offline mode
3 | roles:
4 | - this_role_does_not_exist # and also role that does not exist
5 |
--------------------------------------------------------------------------------
/test/roles/acme.sample2/meta/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | dependencies: []
3 |
4 | galaxy_info:
5 | # role_name is missing in order to test deduction from folder name
6 | author: acme
7 | description: ACME sample role
8 | company: "ACME LTD"
9 | license: MIT
10 | min_ansible_version: "2.9"
11 | platforms:
12 | - name: Debian
13 | versions:
14 | - any
15 | galaxy_tags:
16 | - samples
17 |
--------------------------------------------------------------------------------
/test/roles/ansible-role-sample/meta/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | dependencies: []
3 |
4 | galaxy_info:
5 | role_name: sample
6 | author: acme
7 | description: ACME sample role
8 | company: "ACME LTD"
9 | license: MIT
10 | min_ansible_version: "2.9"
11 | platforms:
12 | - name: Debian
13 | versions:
14 | - any
15 | galaxy_tags:
16 | - samples
17 |
--------------------------------------------------------------------------------
/test/roles/sample3/meta/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | dependencies: []
3 |
4 | galaxy_info:
5 | # role_name is missing in order to test deduction from folder name
6 | author: acme
  description: ACME sample role
8 | company: "ACME LTD"
9 | license: MIT
10 | min_ansible_version: "2.9"
11 | platforms:
12 | - name: Debian
13 | versions:
14 | - any
15 | galaxy_tags:
16 | - samples
17 |
--------------------------------------------------------------------------------
/test/roles/sample4/meta/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | dependencies: []
3 |
4 | galaxy_info:
5 | # role_name is missing in order to test deduction from folder name
6 | author: acme
  description: ACME sample role
8 | company: "ACME LTD"
9 | license: MIT
10 | min_ansible_version: "2.9"
11 | platforms:
12 | - name: Debian
13 | versions:
14 | - any
15 | galaxy_tags:
16 | - samples
17 |
--------------------------------------------------------------------------------
/test/test_api.py:
--------------------------------------------------------------------------------
1 | """Tests for ansible_compat package."""
2 |
3 |
def test_placeholder() -> None:
    """Placeholder kept so this test module is never empty."""
6 |
--------------------------------------------------------------------------------
/test/test_config.py:
--------------------------------------------------------------------------------
1 | """Tests for ansible_compat.config submodule."""
2 |
3 | import copy
4 | import subprocess
5 |
6 | import pytest
7 | from _pytest.monkeypatch import MonkeyPatch
8 | from packaging.version import Version
9 |
10 | from ansible_compat.config import (
11 | AnsibleConfig,
12 | ansible_version,
13 | parse_ansible_version,
14 | )
15 | from ansible_compat.errors import InvalidPrerequisiteError, MissingAnsibleError
16 |
17 |
def test_config() -> None:
    """Checks that config vars are loaded with their expected type."""
    cfg = AnsibleConfig()
    assert isinstance(cfg.ACTION_WARNINGS, bool)
    assert isinstance(cfg.CACHE_PLUGIN_PREFIX, str)
    assert isinstance(cfg.CONNECTION_FACTS_MODULES, dict)
    assert cfg.ANSIBLE_COW_PATH is None
    assert isinstance(cfg.NETWORK_GROUP_MODULES, list)
    assert isinstance(cfg.DEFAULT_FORKS, int | type(None))

    # lowercase and older option names are aliases of the same value
    assert isinstance(cfg.collections_paths, list)
    assert isinstance(cfg.collections_path, list)
    assert cfg.collections_paths == cfg.collections_path

    # the raw data mapping is exposed via the special "data" member
    assert cfg.data["ACTION_WARNINGS"] == cfg.ACTION_WARNINGS

    with pytest.raises(AttributeError):
        _ = cfg.THIS_DOES_NOT_EXIST
38 |
39 |
def test_config_with_dump() -> None:
    """Tests that config can parse given dumps."""
    cfg = AnsibleConfig(config_dump="ACTION_WARNINGS(default) = True")
    assert cfg.ACTION_WARNINGS is True
44 |
45 |
def test_config_copy() -> None:
    """Checks ability to use copy/deepcopy."""
    original = AnsibleConfig()
    # both shallow and deep copies must produce distinct AnsibleConfig objects
    for copier in (copy.copy, copy.deepcopy):
        duplicate = copier(original)
        assert isinstance(duplicate, AnsibleConfig)
        assert duplicate is not original
56 |
57 |
def test_parse_ansible_version_fail() -> None:
    """Checks that parse_ansible_version raises an error on invalid input."""
    expected = "Unable to parse ansible cli version"
    with pytest.raises(InvalidPrerequisiteError, match=expected):
        parse_ansible_version("foo")
65 |
66 |
def test_ansible_version_missing(monkeypatch: MonkeyPatch) -> None:
    """Validate ansible_version behavior when ansible is missing."""

    def failing_run(*args: object, **kwargs: object) -> subprocess.CompletedProcess:  # noqa: ARG001
        # simulate the ansible binary exiting with a non-zero status
        return subprocess.CompletedProcess(args=[], returncode=1)

    monkeypatch.setattr("subprocess.run", failing_run)
    with pytest.raises(
        MissingAnsibleError,
        match=r"Unable to find a working copy of ansible executable.",
    ):
        # bypassing lru cache
        ansible_version.__wrapped__()
82 |
83 |
def test_ansible_version() -> None:
    """Validate ansible_version behavior."""
    minimum = Version("1.0")
    assert ansible_version() >= minimum
87 |
88 |
def test_ansible_version_arg() -> None:
    """Validate ansible_version behavior with an explicit version string."""
    parsed = ansible_version("2.0")
    assert parsed >= Version("1.0")
92 |
--------------------------------------------------------------------------------
/test/test_configuration_example.py:
--------------------------------------------------------------------------------
1 | """Sample usage of AnsibleConfig."""
2 |
3 | from ansible_compat.config import AnsibleConfig
4 |
5 |
def test_example_config() -> None:
    """Test basic functionality of AnsibleConfig."""
    config = AnsibleConfig()
    # attributes are accessible in both uppercase and lowercase spellings
    assert isinstance(config.ACTION_WARNINGS, bool)
    assert isinstance(config.action_warnings, bool)
    # dictionary-style access returns the same value as attribute access
    assert config["action_warnings"] == config.action_warnings
14 |
--------------------------------------------------------------------------------
/test/test_loaders.py:
--------------------------------------------------------------------------------
1 | """Test for ansible_compat.loaders module."""
2 |
3 | from pathlib import Path
4 |
5 | from ansible_compat.loaders import colpath_from_path
6 |
7 |
def test_colpath_from_path() -> None:
    """Test colpath_from_path with a path that does not exist."""
    missing = Path("/foo/bar/")
    assert colpath_from_path(missing) is None
11 |
--------------------------------------------------------------------------------
/test/test_prerun.py:
--------------------------------------------------------------------------------
1 | """Tests for ansible_compat.prerun module."""
2 |
3 | from __future__ import annotations
4 |
5 | import tempfile
6 | from pathlib import Path
7 | from typing import TYPE_CHECKING
8 |
9 | import pytest
10 |
11 | if TYPE_CHECKING:
12 | from _pytest.monkeypatch import MonkeyPatch
13 |
14 | from ansible_compat.prerun import get_cache_dir
15 |
16 |
def test_get_cache_dir_relative() -> None:
    """Relative and absolute forms of the same path yield the same cache dir."""
    here = Path()
    assert get_cache_dir(here) == get_cache_dir(here.resolve())
22 |
23 |
def test_get_cache_dir_no_isolation_no_venv(monkeypatch: MonkeyPatch) -> None:
    """Test behaviors of get_cache_dir.

    Args:
        monkeypatch: Pytest fixture for monkeypatching
    """
    for var in ("VIRTUAL_ENV", "ANSIBLE_HOME"):
        monkeypatch.delenv(var, raising=False)
    assert get_cache_dir(Path(), isolated=False) == Path("~/.ansible").expanduser()
33 |
34 |
def test_get_cache_dir_isolation_no_venv(monkeypatch: MonkeyPatch) -> None:
    """Test behaviors of get_cache_dir.

    Args:
        monkeypatch: Pytest fixture for monkeypatching
    """
    for var in ("VIRTUAL_ENV", "ANSIBLE_HOME"):
        monkeypatch.delenv(var, raising=False)
    result = get_cache_dir(Path(), isolated=True)
    assert result == Path().cwd() / ".ansible"
45 |
46 |
def test_get_cache_dir_isolation_no_venv_root(monkeypatch: MonkeyPatch) -> None:
    """Test behaviors of get_cache_dir.

    Args:
        monkeypatch: Pytest fixture for monkeypatching
    """
    for var in ("VIRTUAL_ENV", "ANSIBLE_HOME"):
        monkeypatch.delenv(var, raising=False)
    with (
        pytest.warns(
            UserWarning,
            match=r"Project directory /.ansible cannot be used for caching as it is not writable.",
        ),
        pytest.warns(
            UserWarning,
            match=r"Using unique temporary directory .* for caching.",
        ),
    ):
        result = get_cache_dir(Path("/"), isolated=True)
    assert result.as_posix().startswith(tempfile.gettempdir())
67 |
68 |
def test_get_cache_dir_venv_ro_project_ro(monkeypatch: MonkeyPatch) -> None:
    """Test behaviors of get_cache_dir with read-only virtual environment and read only project directory.

    Args:
        monkeypatch: Pytest fixture for monkeypatching
    """
    monkeypatch.setenv("VIRTUAL_ENV", "/")
    monkeypatch.delenv("ANSIBLE_HOME", raising=False)
    with (
        pytest.warns(
            UserWarning,
            match=r"Using unique temporary directory .* for caching.",
        ),
        pytest.warns(
            UserWarning,
            match=r"Found VIRTUAL_ENV=/ but we cannot use it for caching as it is not writable.",
        ),
        pytest.warns(
            UserWarning,
            match=r"Project directory .* cannot be used for caching as it is not writable.",
        ),
    ):
        result = get_cache_dir(Path("/etc"), isolated=True)
    assert result.as_posix().startswith(tempfile.gettempdir())
93 |
--------------------------------------------------------------------------------
/test/test_runtime.py:
--------------------------------------------------------------------------------
1 | """Tests for Runtime class."""
2 |
3 | # pylint: disable=protected-access,too-many-lines
4 | from __future__ import annotations
5 |
6 | import logging
7 | import os
8 | import pathlib
9 | import subprocess
10 | from contextlib import contextmanager
11 | from pathlib import Path
12 | from shutil import rmtree
13 | from typing import TYPE_CHECKING, Any
14 |
15 | import pytest
16 | from packaging.version import Version
17 |
18 | from ansible_compat.constants import INVALID_PREREQUISITES_RC
19 | from ansible_compat.errors import (
20 | AnsibleCommandError,
21 | AnsibleCompatError,
22 | InvalidPrerequisiteError,
23 | )
24 | from ansible_compat.runtime import (
25 | CompletedProcess,
26 | Runtime,
27 | _get_galaxy_role_name,
28 | is_url,
29 | search_galaxy_paths,
30 | )
31 |
32 | if TYPE_CHECKING:
33 | from collections.abc import Iterator
34 |
35 | from _pytest.monkeypatch import MonkeyPatch
36 | from pytest_mock import MockerFixture
37 |
38 |
def test_runtime_version(runtime: Runtime) -> None:
    """Tests version property."""
    first = runtime.version
    assert isinstance(first, Version)
    # accessing again exercises the cached-property path (coverage)
    assert first == runtime.version
45 |
46 |
@pytest.mark.parametrize(
    "require_module",
    (True, False),
    ids=("module-required", "module-unrequired"),
)
def test_runtime_version_outdated(require_module: bool) -> None:
    """Checks that instantiation raises if version is outdated."""
    expected = "Found incompatible version of ansible"
    with pytest.raises(RuntimeError, match=expected):
        Runtime(min_required_version="9999.9.9", require_module=require_module)
56 |
57 |
def test_runtime_missing_ansible_module(monkeypatch: MonkeyPatch) -> None:
    """Checks that we produce a RuntimeError when ansible module is missing."""

    def fail_import(
        *args: Any,  # noqa: ARG001,ANN401
        **kwargs: Any,  # noqa: ARG001,ANN401
    ) -> None:
        # any attempt to import a module behaves as if it is not installed
        raise ModuleNotFoundError

    monkeypatch.setattr("importlib.import_module", fail_import)

    with pytest.raises(RuntimeError, match=r"Unable to find Ansible python module."):
        Runtime(require_module=True)
75 |
76 |
def test_runtime_mismatch_ansible_module(monkeypatch: MonkeyPatch) -> None:
    """Test that a version mismatch between cli and python module is detected."""
    monkeypatch.setattr("ansible.release.__version__", "0.0.0", raising=False)
    with pytest.raises(RuntimeError, match="versions do not match"):
        Runtime(require_module=True)
82 |
83 |
def test_runtime_require_module() -> None:
    """Check that Runtime(require_module=True) succeeds and can be repeated safely."""
    Runtime(require_module=True)
    # Now we try to set the collection path, something to check if that is
    # causing an exception, as 2.15 introduced new init code.
    from ansible.utils.collection_loader import (  # pylint: disable=import-outside-toplevel
        AnsibleCollectionConfig,
    )

    AnsibleCollectionConfig.playbook_paths = "."
    # Calling it again in order to see that it does not produce UserWarning: AnsibleCollectionFinder has already been configured
    # which is done by Ansible core 2.15+. We added special code inside Runtime
    # that should avoid initializing twice and raise that warning.
    Runtime(require_module=True)
98 |
99 |
def test_runtime_version_fail_module(mocker: MockerFixture) -> None:
    """Tests for failure to detect Ansible version."""
    mocker.patch(
        "ansible_compat.runtime.parse_ansible_version",
        autospec=True,
        side_effect=InvalidPrerequisiteError("Unable to parse ansible cli version"),
    )
    rt = Runtime()
    with pytest.raises(
        InvalidPrerequisiteError,
        match="Unable to parse ansible cli version",
    ):
        _ = rt.version  # pylint: disable=pointless-statement
115 |
116 |
def test_runtime_version_fail_cli(mocker: MockerFixture) -> None:
    """Tests for failure to detect Ansible version."""
    fake_result = CompletedProcess(
        ["x"],
        returncode=123,
        stdout="oops",
        stderr="some error",
    )
    mocker.patch(
        "ansible_compat.runtime.Runtime.run",
        return_value=fake_result,
        autospec=True,
    )
    rt = Runtime()
    with pytest.raises(
        RuntimeError,
        match=r"Unable to find a working copy of ansible executable.",
    ):
        _ = rt.version  # pylint: disable=pointless-statement
135 |
136 |
def test_runtime_prepare_ansible_paths_validation() -> None:
    """Check that we validate collection_path."""
    runtime = Runtime()
    # Deliberately corrupt the configuration with a non-list value.
    runtime.config.collections_paths = "invalid-value"  # type: ignore[assignment]
    with pytest.raises(RuntimeError, match="Unexpected ansible configuration"):
        runtime._prepare_ansible_paths()
143 |
144 |
@pytest.mark.parametrize(
    ("folder", "role_name", "isolated"),
    (
        ("ansible-role-sample", "acme.sample", True),
        ("acme.sample2", "acme.sample2", True),
        ("sample3", "acme.sample3", True),
        ("sample4", "acme.sample4", False),
    ),
    ids=("1", "2", "3", "4"),
)
def test_runtime_install_role(
    caplog: pytest.LogCaptureFixture,
    folder: str,
    role_name: str,
    isolated: bool,
) -> None:
    """Checks that we can install roles."""
    caplog.set_level(logging.INFO)
    project_dir = Path(__file__).parent / "roles" / folder
    runtime = Runtime(isolated=isolated, project_dir=project_dir)
    runtime.prepare_environment(install_local=True)
    # The role must now show up in ansible-galaxy's listing.
    result = runtime.run(["ansible-galaxy", "list"])
    assert result.returncode == 0, result
    assert role_name in result.stdout
    # The role is exposed as a symlink, either in the isolated cache or in
    # the default roles path.
    if isolated:
        link = pathlib.Path(f"{runtime.cache_dir}/roles/{role_name}")
    else:
        roles_root = Path(runtime.config.default_roles_path[0]).expanduser()
        link = pathlib.Path(f"{roles_root}/{role_name}")
    assert link.is_symlink()
    runtime.clean()
177 |
178 |
def test_prepare_environment_with_collections(runtime_tmp: Runtime) -> None:
    """Check that collections are correctly installed."""
    required = {"community.molecule": "0.1.0"}
    runtime_tmp.prepare_environment(
        required_collections=required,
        install_local=True,
    )
    assert "community.molecule" in runtime_tmp.collections
186 |
187 |
def test_runtime_install_requirements_missing_file() -> None:
    """Check that missing requirements file is ignored."""
    # Do not rely on this behavior, it may be removed in the future
    missing = Path("/that/does/not/exist")
    Runtime().install_requirements(missing)
193 |
194 |
@pytest.mark.parametrize(
    ("file", "exc", "msg"),
    (
        (
            Path("/dev/null"),
            InvalidPrerequisiteError,
            "file is not a valid Ansible requirements file",
        ),
        (
            Path(__file__).parent / "assets" / "requirements-invalid-collection.yml",
            AnsibleCommandError,
            "Got 1 exit code while running: ansible-galaxy",
        ),
        (
            Path(__file__).parent / "assets" / "requirements-invalid-role.yml",
            AnsibleCommandError,
            "Got 1 exit code while running: ansible-galaxy",
        ),
    ),
    ids=("empty", "invalid-collection", "invalid-role"),
)
def test_runtime_install_requirements_invalid_file(
    file: Path,
    exc: type[Any],
    msg: str,
) -> None:
    """Check that invalid requirements file is raising."""
    runtime = Runtime()
    # Each invalid input maps to a specific exception type and message.
    with pytest.raises(exc, match=msg):
        runtime.install_requirements(file)
228 |
229 |
@contextmanager
def cwd(path: Path) -> Iterator[None]:
    """Temporarily switch the current working directory to ``path``.

    Restores the previous working directory on exit, even on error.
    """
    previous = Path.cwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(previous)
239 |
240 |
def test_prerun_reqs_v1(caplog: pytest.LogCaptureFixture) -> None:
    """Checks that the linter can auto-install requirements v1 when found."""
    path = Path(__file__).parent.parent / "examples" / "reqs_v1"
    runtime = Runtime(project_dir=path, verbosity=1)
    with cwd(path):
        runtime.prepare_environment()
    # v1 requirements only contain roles, so a role install must have run
    # while no collection install should appear in the logs.
    messages = caplog.messages
    assert any(m.startswith("Running ansible-galaxy role install") for m in messages)
    assert not any(
        "Running ansible-galaxy collection install" in m for m in messages
    )
254 |
255 |
def test_prerun_reqs_v2(caplog: pytest.LogCaptureFixture) -> None:
    """Checks that the linter can auto-install requirements v2 when found."""
    path = (Path(__file__).parent.parent / "examples" / "reqs_v2").resolve()
    runtime = Runtime(project_dir=path, verbosity=1)
    with cwd(path):
        runtime.prepare_environment()
    # v2 requirements contain both roles and collections, so both kinds of
    # galaxy install commands must appear in the logs.
    messages = caplog.messages
    assert any(m.startswith("Running ansible-galaxy role install") for m in messages)
    assert any(
        m.startswith("Running ansible-galaxy collection install") for m in messages
    )
270 |
271 |
def test_prerun_reqs_broken() -> None:
    """Checks that we report invalid requirements.yml file."""
    path = (Path(__file__).parent.parent / "examples" / "reqs_broken").resolve()
    runtime = Runtime(project_dir=path, verbosity=1)
    # A malformed requirements.yml must surface as InvalidPrerequisiteError.
    with cwd(path), pytest.raises(InvalidPrerequisiteError):
        runtime.prepare_environment()
278 |
279 |
def test__update_env_no_old_value_no_default_no_value(monkeypatch: MonkeyPatch) -> None:
    """Make sure empty value does not touch environment."""
    # No pre-existing value, no default, no new value: nothing should be set.
    monkeypatch.delenv("DUMMY_VAR", raising=False)
    runtime = Runtime()
    runtime._update_env("DUMMY_VAR", [])
    assert "DUMMY_VAR" not in runtime.environ
288 |
289 |
def test__update_env_no_old_value_no_value(monkeypatch: MonkeyPatch) -> None:
    """Make sure empty value does not touch environment."""
    # Even with a default supplied, an empty value list must not set the var.
    monkeypatch.delenv("DUMMY_VAR", raising=False)
    runtime = Runtime()
    runtime._update_env("DUMMY_VAR", [], "a:b")
    assert "DUMMY_VAR" not in runtime.environ
298 |
299 |
def test__update_env_no_default_no_value(monkeypatch: MonkeyPatch) -> None:
    """Make sure empty value does not touch environment."""
    # A pre-existing value survives an update with an empty value list.
    monkeypatch.setenv("DUMMY_VAR", "a:b")
    runtime = Runtime()
    runtime._update_env("DUMMY_VAR", [])
    assert runtime.environ["DUMMY_VAR"] == "a:b"
308 |
309 |
@pytest.mark.parametrize(
    ("value", "result"),
    (
        (["a"], "a"),
        (["a", "b"], "a:b"),
        (["a", "b", "c"], "a:b:c"),
    ),
)
def test__update_env_no_old_value_no_default(
    monkeypatch: MonkeyPatch,
    value: list[str],
    result: str,
) -> None:
    """Values are concatenated using : as the separator."""
    # Start without any pre-existing variable.
    monkeypatch.delenv("DUMMY_VAR", raising=False)
    runtime = Runtime()
    runtime._update_env("DUMMY_VAR", value)
    assert runtime.environ["DUMMY_VAR"] == result
330 |
331 |
@pytest.mark.parametrize(
    ("default", "value", "result"),
    (
        ("a:b", ["c"], "c:a:b"),
        ("a:b", ["c:d"], "c:d:a:b"),
    ),
)
def test__update_env_no_old_value(
    monkeypatch: MonkeyPatch,
    default: str,
    value: list[str],
    result: str,
) -> None:
    """Values are appended to default value."""
    # Without a pre-existing value, the default acts as the base.
    monkeypatch.delenv("DUMMY_VAR", raising=False)
    runtime = Runtime()
    runtime._update_env("DUMMY_VAR", value, default)
    assert runtime.environ["DUMMY_VAR"] == result
352 |
353 |
@pytest.mark.parametrize(
    ("old_value", "value", "result"),
    (
        ("a:b", ["c"], "c:a:b"),
        ("a:b", ["c:d"], "c:d:a:b"),
    ),
)
def test__update_env_no_default(
    monkeypatch: MonkeyPatch,
    old_value: str,
    value: list[str],
    result: str,
) -> None:
    """Values are appended to preexisting value."""
    # Seed the environment with a pre-existing value.
    monkeypatch.setenv("DUMMY_VAR", old_value)
    runtime = Runtime()
    runtime._update_env("DUMMY_VAR", value)
    assert runtime.environ["DUMMY_VAR"] == result
374 |
375 |
@pytest.mark.parametrize(
    ("old_value", "default", "value", "result"),
    (
        ("", "", ["e"], "e"),
        ("a", "", ["e"], "e:a"),
        ("", "c", ["e"], "e"),
        ("a", "c", ["e:f"], "e:f:a"),
    ),
)
def test__update_env(
    monkeypatch: MonkeyPatch,
    old_value: str,
    default: str,  # pylint: disable=unused-argument # noqa: ARG001
    value: list[str],
    result: str,
) -> None:
    """Defaults are ignored when preexisting value is present."""
    # The default parameter is intentionally not passed to _update_env:
    # it must have no effect when the variable already exists.
    monkeypatch.setenv("DUMMY_VAR", old_value)
    runtime = Runtime()
    runtime._update_env("DUMMY_VAR", value)
    assert runtime.environ["DUMMY_VAR"] == result
399 |
400 |
def test_require_collection_wrong_version(runtime: Runtime) -> None:
    """Tests behavior of require_collection."""
    # Pre-install a known old collection into the default user location.
    subprocess.check_output(
        [
            "ansible-galaxy",
            "collection",
            "install",
            "examples/reqs_v2/community-molecule-0.1.0.tar.gz",
            "-p",
            "~/.ansible/collections",
        ],
    )
    with pytest.raises(InvalidPrerequisiteError) as exc_info:
        runtime.require_collection("community.molecule", "9999.9.9")
    assert exc_info.type == InvalidPrerequisiteError
    assert exc_info.value.code == INVALID_PREREQUISITES_RC
417 |
418 |
def test_require_collection_invalid_name(runtime: Runtime) -> None:
    """Check that require_collection raise with invalid collection name."""
    # Collection names must be namespace.name; a plain string is rejected.
    with pytest.raises(
        InvalidPrerequisiteError,
        match="Invalid collection name supplied:",
    ):
        runtime.require_collection("that-is-invalid")
426 |
427 |
def test_require_collection_invalid_collections_path(runtime: Runtime) -> None:
    """Check that require_collection raise with invalid collections path."""
    # Break the configured collections path before calling require_collection.
    runtime.config.collections_paths = "/that/is/invalid"  # type: ignore[assignment]
    with pytest.raises(
        InvalidPrerequisiteError,
        match="Unable to determine ansible collection paths",
    ):
        runtime.require_collection("community.molecule")
436 |
437 |
def test_require_collection_preexisting_broken(runtime_tmp: Runtime) -> None:
    """Check that require_collection raise with broken pre-existing collection."""
    # Create a collection directory without a MANIFEST.json to simulate a
    # broken installation.
    dest_path: str = runtime_tmp.config.collections_paths[0]
    broken = pathlib.Path(dest_path) / "ansible_collections" / "foo" / "bar"
    broken.mkdir(parents=True, exist_ok=True)
    with pytest.raises(InvalidPrerequisiteError, match=r"missing MANIFEST.json"):
        runtime_tmp.require_collection("foo.bar")
445 |
446 |
def test_require_collection_install(runtime_tmp: Runtime) -> None:
    """Check that require collection successful install case, including upgrade path."""
    # Install a pinned old version first.
    runtime_tmp.install_collection("ansible.posix:==1.5.2")
    runtime_tmp.load_collections()
    assert runtime_tmp.collections["ansible.posix"].version == "1.5.2"
    # Requiring a newer version with install=True must upgrade it.
    runtime_tmp.require_collection(name="ansible.posix", version="1.5.4", install=True)
    runtime_tmp.load_collections()
    upgraded = runtime_tmp.collections["ansible.posix"]
    assert Version(upgraded.version) >= Version("1.5.4")
457 |
458 |
@pytest.mark.parametrize(
    ("name", "version", "install"),
    (
        ("fake_namespace.fake_name", None, True),
        ("fake_namespace.fake_name", "9999.9.9", True),
        ("fake_namespace.fake_name", None, False),
    ),
    ids=("a", "b", "c"),
)
def test_require_collection_missing(
    name: str,
    version: str,
    install: bool,
    runtime: Runtime,
) -> None:
    """Tests behavior of require_collection, missing case."""
    with pytest.raises(AnsibleCompatError) as exc_info:
        runtime.require_collection(name=name, version=version, install=install)
    assert exc_info.type == InvalidPrerequisiteError
    assert exc_info.value.code == INVALID_PREREQUISITES_RC
479 |
480 |
def test_install_collection(runtime: Runtime) -> None:
    """Check that valid collection installs do not fail."""
    tarball = "examples/reqs_v2/community-molecule-0.1.0.tar.gz"
    runtime.install_collection(tarball)
484 |
485 |
def test_install_collection_git(runtime: Runtime) -> None:
    """Check that valid collection installs do not fail."""
    # git URL with an explicit branch after the comma
    source = "git+https://github.com/ansible-collections/ansible.posix,main"
    runtime.install_collection(source)
491 |
492 |
def test_install_collection_dest(runtime: Runtime, tmp_path: pathlib.Path) -> None:
    """Check that valid collection to custom destination passes."""
    # Since Ansible 2.15.3 there is no guarantee that this will install the collection at requested path
    # as it might decide to not install anything if requirement is already present at another location.
    runtime.install_collection(
        "examples/reqs_v2/community-molecule-0.1.0.tar.gz",
        destination=tmp_path,
    )
    runtime.load_collections()
    msg = "Failed to find collection as installed."
    assert "community.molecule" in runtime.collections, msg
507 |
508 |
def test_install_collection_fail(runtime: Runtime) -> None:
    """Check that invalid collection install fails."""
    # No such version exists, so install must raise.
    with pytest.raises(AnsibleCompatError) as exc_info:
        runtime.install_collection("community.molecule:>=9999.0")
    assert exc_info.type == InvalidPrerequisiteError
    assert exc_info.value.code == INVALID_PREREQUISITES_RC
515 |
516 |
def test_install_galaxy_role(runtime_tmp: Runtime) -> None:
    """Check install role with empty galaxy file."""
    project = runtime_tmp.project_dir
    pathlib.Path(f"{project}/galaxy.yml").touch()
    pathlib.Path(f"{project}/meta").mkdir()
    pathlib.Path(f"{project}/meta/main.yml").touch()
    # role_name_check=1 should only raise a warning
    runtime_tmp._install_galaxy_role(project, role_name_check=1)
    # role_name_check=2 bypasses the role name check entirely
    runtime_tmp._install_galaxy_role(project, role_name_check=2)
    # role_name_check=0 enforces galaxy naming and must raise
    with pytest.raises(
        InvalidPrerequisiteError,
        match="does not follow current galaxy requirements",
    ):
        runtime_tmp._install_galaxy_role(project, role_name_check=0)
532 |
533 |
def test_install_galaxy_role_unlink(
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test ability to unlink incorrect symlinked roles."""
    runtime_tmp = Runtime(verbosity=1, isolated=True)
    runtime_tmp.prepare_environment()
    assert runtime_tmp.cache_dir is not None
    # Plant a bogus symlink where the role would be linked.
    pathlib.Path(f"{runtime_tmp.cache_dir}/roles").mkdir(parents=True, exist_ok=True)
    roledir = pathlib.Path(f"{runtime_tmp.cache_dir}/roles/acme.get_rich")
    if not roledir.exists():
        roledir.symlink_to("/dev/null")
    pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir(exist_ok=True)
    meta_main = pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml")
    meta_main.write_text(
        """galaxy_info:
  role_name: get_rich
  namespace: acme
""",
        encoding="utf-8",
    )
    runtime_tmp._install_galaxy_role(runtime_tmp.project_dir)
    assert "symlink to current repository" in caplog.text
    meta_main.unlink()
556 |
557 |
def test_install_galaxy_role_bad_namespace(runtime_tmp: Runtime) -> None:
    """Check install role with bad namespace in galaxy info."""
    pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir()
    # namespace must be a string; a list is invalid metadata
    pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").write_text(
        """galaxy_info:
  role_name: foo
  author: bar
  namespace: ["xxx"]
""",
        encoding="utf-8",
    )
    # this should raise an error regardless the role_name_check value
    with pytest.raises(AnsibleCompatError, match="Role namespace must be string, not"):
        runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=1)
572 |
573 |
def test_install_galaxy_role_no_meta(runtime_tmp: Runtime) -> None:
    """Check install role with missing meta/main.yml."""
    # Without meta/main.yml the install must fail with FileNotFoundError.
    expected = (
        f"No such file or directory: '{runtime_tmp.project_dir.absolute()}/meta/main.yaml'"
    )
    with pytest.raises(FileNotFoundError, match=expected):
        runtime_tmp._install_galaxy_role(runtime_tmp.project_dir)
    # But ignore_errors will return without doing anything
    runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, ignore_errors=True)
584 |
585 |
@pytest.mark.parametrize(
    "galaxy_info",
    (
        """galaxy_info:
  role_name: foo-bar
  namespace: acme
""",
        """galaxy_info:
  role_name: foo-bar
""",
    ),
    ids=("bad-name", "bad-name-without-namespace"),
)
def test_install_galaxy_role_name_role_name_check_equals_to_1(
    runtime_tmp: Runtime,
    galaxy_info: str,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Check install role with bad role name in galaxy info."""
    caplog.set_level(logging.WARNING)
    meta_dir = pathlib.Path(f"{runtime_tmp.project_dir}/meta")
    meta_dir.mkdir()
    (meta_dir / "main.yml").write_text(galaxy_info, encoding="utf-8")
    # role_name_check=1 only warns about the non-conforming name.
    runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=1)
    assert "Computed fully qualified role name of " in caplog.text
614 |
615 |
def test_install_galaxy_role_no_checks(runtime_tmp: Runtime) -> None:
    """Check install role with bad namespace in galaxy info."""
    runtime_tmp.prepare_environment()
    meta_dir = pathlib.Path(f"{runtime_tmp.project_dir}/meta")
    meta_dir.mkdir()
    (meta_dir / "main.yml").write_text(
        """galaxy_info:
  role_name: foo
  author: bar
  namespace: acme
""",
        encoding="utf-8",
    )
    # role_name_check=2 skips all naming validation.
    runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=2)
    result = runtime_tmp.run(["ansible-galaxy", "list"])
    assert "- acme.foo," in result.stdout
    assert result.returncode == 0, result
632 |
633 |
def test_upgrade_collection(runtime_tmp: Runtime) -> None:
    """Check that collection upgrade is possible."""
    # ensure that we inject our tmp folders in ansible paths
    runtime_tmp.prepare_environment()
    # we install specific outdated version of a collection
    runtime_tmp.install_collection("examples/reqs_v2/community-molecule-0.1.0.tar.gz")
    # requiring a newer version with install=False must raise
    with pytest.raises(
        InvalidPrerequisiteError,
        match=r"Found community.molecule collection 0.1.0 but 9.9.9 or newer is required.",
    ):
        runtime_tmp.require_collection("community.molecule", "9.9.9", install=False)
    # this should not fail, as we have this version
    runtime_tmp.require_collection("community.molecule", "0.1.0")
649 |
650 |
def test_require_collection_not_isolated() -> None:
    """Check require_collection without a cache directory."""
    non_isolated = Runtime(isolated=False)
    non_isolated.require_collection("community.molecule", "0.1.0", install=True)
655 |
656 |
def test_runtime_env_ansible_library(monkeypatch: MonkeyPatch) -> None:
    """Verify that custom path specified using ANSIBLE_LIBRARY is not lost."""
    monkeypatch.setenv("ANSIBLE_LIBRARY", "foo")
    # prepare_environment resolves the env path into the module path config
    resolved = os.path.realpath("foo")
    runtime = Runtime()
    runtime.prepare_environment()
    assert resolved in runtime.config.default_module_path
666 |
667 |
@pytest.mark.parametrize(
    ("lower", "upper", "expected"),
    (
        ("1.0", "9999.0", True),
        (None, "9999.0", True),
        ("1.0", None, True),
        ("9999.0", None, False),
        (None, "1.0", False),
    ),
    ids=("1", "2", "3", "4", "5"),
)
def test_runtime_version_in_range(
    lower: str | None,
    upper: str | None,
    expected: bool,
) -> None:
    """Validate functioning of version_in_range."""
    # Either bound may be None, meaning unbounded on that side.
    assert Runtime().version_in_range(lower=lower, upper=upper) is expected
687 |
688 |
@pytest.mark.parametrize(
    ("path", "scenario", "expected_collections"),
    (
        pytest.param(
            "test/collections/acme.goodies",
            "default",
            [
                "ansible.posix",  # from tests/requirements.yml
                "ansible.utils",  # from galaxy.yml
                "community.molecule",  # from galaxy.yml
                "community.crypto",  # from galaxy.yml as a git dependency
            ],
            id="normal",
        ),
        pytest.param(
            "test/collections/acme.goodies/roles/baz",
            "deep_scenario",
            ["community.molecule"],
            id="deep",
        ),
    ),
)
def test_install_collection_from_disk(
    path: str,
    scenario: str,
    expected_collections: list[str],
) -> None:
    """Tests ability to install a local collection."""
    # ensure we do not have acme.goodies installed in user directory as it may
    # produce false positives
    user_copy = pathlib.Path(
        "~/.ansible/collections/ansible_collections/acme/goodies",
    ).expanduser()
    rmtree(user_copy, ignore_errors=True)
    with cwd(Path(path)):
        runtime = Runtime(isolated=True)
        # this should call install_collection_from_disk(".")
        runtime.prepare_environment(install_local=True)
        # that molecule converge playbook can be used without molecule and
        # should validate that the installed collection is available.
        result = runtime.run(["ansible-playbook", f"molecule/{scenario}/converge.yml"])
        assert result.returncode == 0, result.stdout
        runtime.load_collections()
        for collection_name in expected_collections:
            msg = f"{collection_name} not found in {runtime.collections.keys()}"
            assert collection_name in runtime.collections, msg
        runtime.clean()
739 |
740 |
@pytest.mark.parametrize(
    ("path", "expected_plugins"),
    (
        pytest.param(
            "test/collections/acme.goodies",
            [
                "ansible.posix.patch",  # from tests/requirements.yml
                "community.crypto.acme_account",  # from galaxy.yml as a git dependency
            ],
            id="modules",
        ),
    ),
)
def test_load_plugins(
    path: str,
    expected_plugins: list[str],
) -> None:
    """Tests ability to load plugin from a collection installed by requirement."""
    with cwd(Path(path)):
        runtime = Runtime(isolated=True, require_module=True)
        runtime.prepare_environment(install_local=True)
        # every expected module must be resolvable through the plugin loader
        for plugin_name in expected_plugins:
            msg = f"Unable to load module {plugin_name}"
            assert plugin_name in runtime.plugins.module, msg
        runtime.clean()
768 |
769 |
def test_install_collection_from_disk_fail() -> None:
    """Tests that we fail to install a broken collection."""
    with cwd(Path("test/collections/acme.broken")):
        runtime = Runtime(isolated=True)
        with pytest.raises(RuntimeError) as exc_info:
            runtime.prepare_environment(install_local=True)
        # based on version of Ansible used, we might get a different error,
        # but both errors should be considered acceptable
        acceptable = {
            RuntimeError,
            AnsibleCompatError,
            AnsibleCommandError,
            InvalidPrerequisiteError,
        }
        assert exc_info.type in acceptable
        assert exc_info.match(
            "(is missing the following mandatory|Got 1 exit code while running: ansible-galaxy collection build)",
        )
787 |
788 |
def test_load_collections_failure(mocker: MockerFixture) -> None:
    """Tests for ansible-galaxy erroring."""
    # Simulate ansible-galaxy exiting with a failure code.
    failed_run = CompletedProcess(
        ["x"],
        returncode=1,
        stdout="There was an error",
        stderr="This is the error",
    )
    mocker.patch(
        "ansible_compat.runtime.Runtime.run",
        return_value=failed_run,
        autospec=True,
    )
    runtime = Runtime()
    with pytest.raises(RuntimeError, match="Unable to list collections: "):
        runtime.load_collections()
804 |
805 |
@pytest.mark.parametrize(
    "value",
    ("[]", '{"path": "bad data"}', '{"path": {"ansible.posix": 123}}'),
    ids=["list", "malformed_collection", "bad_collection_data"],
)
def test_load_collections_garbage(value: str, mocker: MockerFixture) -> None:
    """Tests for ansible-galaxy returning bad data."""
    # ansible-galaxy succeeds but emits structurally invalid JSON payloads.
    fake_run = CompletedProcess(
        ["x"],
        returncode=0,
        stdout=value,
        stderr="",
    )
    mocker.patch(
        "ansible_compat.runtime.Runtime.run",
        return_value=fake_run,
        autospec=True,
    )
    runtime = Runtime()
    with pytest.raises(TypeError, match="Unexpected collection data, "):
        runtime.load_collections()
826 |
827 |
@pytest.mark.parametrize(
    "value",
    ("", '{"path": {123: 456}}'),
    ids=["nothing", "bad_collection_name"],
)
def test_load_collections_invalid_json(value: str, mocker: MockerFixture) -> None:
    """Tests for ansible-galaxy returning bad data."""
    import re  # pylint: disable=import-outside-toplevel

    mocker.patch(
        "ansible_compat.runtime.Runtime.run",
        return_value=CompletedProcess(
            ["x"],
            returncode=0,
            stdout=value,
            stderr="",
        ),
        autospec=True,
    )
    runtime = Runtime()
    # The payload may contain regex metacharacters (e.g. "{" and "}"),
    # so it must be escaped before being used as a pytest.raises pattern.
    with pytest.raises(
        RuntimeError,
        match=f"Unable to parse galaxy output as JSON: {re.escape(value)}",
    ):
        runtime.load_collections()
851 |
852 |
def test_prepare_environment_offline_role(caplog: pytest.LogCaptureFixture) -> None:
    """Ensure that we can make use of offline roles."""
    with cwd(Path("test/roles/acme.missing_deps")):
        runtime = Runtime(isolated=True)
        runtime.prepare_environment(install_local=True, offline=True)
        # Offline mode must skip both role and collection dependency installs.
        log = caplog.text
        assert (
            "Skipped installing old role dependencies due to running in offline mode."
            in log
        )
        assert (
            "Skipped installing collection dependencies due to running in offline mode."
            in log
        )
866 |
867 |
def test_runtime_run(runtime: Runtime) -> None:
    """Check if tee and non tee mode return same kind of results."""
    plain = runtime.run(["seq", "10"])
    teed = runtime.run(["seq", "10"], tee=True)
    assert plain.returncode == teed.returncode
    assert plain.stderr == teed.stderr
    assert plain.stdout == teed.stdout
875 |
876 |
def test_runtime_exec_cwd(runtime: Runtime) -> None:
    """Check if passing cwd works as expected."""
    target = Path("/")
    in_root = runtime.run(["pwd"], cwd=target)
    in_default = runtime.run(["pwd"])
    assert in_root.stdout.rstrip() == str(target)
    assert in_root.stdout != in_default.stdout
884 |
885 |
def test_runtime_exec_env(runtime: Runtime) -> None:
    """Check if passing env works."""
    # FOO is unset by default, so printenv prints nothing.
    result = runtime.run(["printenv", "FOO"])
    assert not result.stdout
    # Passing env explicitly makes FOO visible for that call only.
    result = runtime.run(["printenv", "FOO"], env={"FOO": "bar"})
    assert result.stdout.rstrip() == "bar"
    # Setting it on runtime.environ makes it visible for subsequent calls.
    runtime.environ["FOO"] = "bar"
    result = runtime.run(["printenv", "FOO"])
    assert result.stdout.rstrip() == "bar"
897 |
898 |
def test_runtime_plugins(runtime: Runtime) -> None:
    """Tests ability to access detected plugins."""
    plugins = runtime.plugins
    assert len(plugins.cliconf) == 0
    # ansible.netcommon.restconf might be in httpapi
    assert isinstance(plugins.httpapi, dict)
    # "ansible.netcommon.default" might be in runtime.plugins.netconf
    assert isinstance(plugins.netconf, dict)
    assert isinstance(plugins.role, dict)
    assert "become" in plugins.keyword

    # one well-known builtin per plugin category:
    assert "ansible.builtin.sudo" in plugins.become
    assert "ansible.builtin.memory" in plugins.cache
    assert "ansible.builtin.default" in plugins.callback
    assert "ansible.builtin.local" in plugins.connection
    assert "ansible.builtin.ini" in plugins.inventory
    assert "ansible.builtin.env" in plugins.lookup
    assert "ansible.builtin.sh" in plugins.shell
    assert "ansible.builtin.host_group_vars" in plugins.vars
    assert "ansible.builtin.file" in plugins.module
    assert "ansible.builtin.free" in plugins.strategy
    assert "ansible.builtin.is_abs" in plugins.test
    assert "ansible.builtin.bool" in plugins.filter
921 |
922 |
@pytest.mark.parametrize(
    ("path", "result"),
    (
        pytest.param(
            Path("test/assets/galaxy_paths"),
            [Path("test/assets/galaxy_paths/foo/galaxy.yml").resolve()],
            id="1",
        ),
        pytest.param(
            Path("test/collections"),
            [],  # should find nothing because these folders are not valid namespaces
            id="2",
        ),
        pytest.param(
            Path("test/assets/galaxy_paths/foo"),
            [Path("test/assets/galaxy_paths/foo/galaxy.yml").resolve()],
            id="3",
        ),
    ),
)
def test_galaxy_path(path: Path, result: list[Path]) -> None:
    """Check behavior of galaxy path search."""
    found = search_galaxy_paths(path)
    assert found == result
946 |
947 |
@pytest.mark.parametrize(
    ("name", "result"),
    (
        pytest.param(
            "foo",
            False,
            id="0",
        ),
        pytest.param(
            "git+git",
            True,
            id="1",
        ),
        pytest.param(
            "git@acme.com",
            True,
            id="2",
        ),
    ),
)
def test_is_url(name: str, result: bool) -> None:
    """Checks functionality of is_url."""
    # git-style prefixes count as URLs; bare names do not.
    assert is_url(name) == result
971 |
972 |
@pytest.mark.parametrize(
    ("dest", "message"),
    (
        ("/invalid/destination", "Collection is symlinked, but not pointing to"),
        (Path.cwd(), "Found symlinked collection, skipping its installation."),
    ),
    ids=["broken", "valid"],
)
def test_prepare_environment_symlink(
    dest: str | Path,
    message: str,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Ensure symlinked collections are properly detected, valid or not."""
    project_dir = Path(__file__).parent / "collections" / "acme.minimal"
    runtime = Runtime(isolated=True, project_dir=project_dir)
    assert runtime.cache_dir
    namespace_dir = runtime.cache_dir / "collections" / "ansible_collections" / "acme"
    namespace_dir.mkdir(parents=True, exist_ok=True)
    collection_link = namespace_dir / "minimal"
    # Start from a clean slate, then plant the symlink under test.
    rmtree(collection_link, ignore_errors=True)
    collection_link.unlink(missing_ok=True)
    collection_link.symlink_to(dest)
    runtime.prepare_environment(install_local=True)
    assert message in caplog.text
998 |
999 |
def test_get_galaxy_role_name_invalid() -> None:
    """Verifies that function returns empty string on invalid input."""
    # role_name should be a string; a bool is invalid metadata
    bad_infos = {"role_name": False}
    assert not _get_galaxy_role_name(bad_infos)
1006 |
1007 |
def test_runtime_has_playbook() -> None:
    """Tests has_playbook method."""
    runtime = Runtime(require_module=True)
    runtime.prepare_environment(
        required_collections={"community.molecule": "0.1.0"},
        install_local=True,
    )

    assert not runtime.has_playbook("this-does-not-exist.yml")
    # call twice to ensure cache is used:
    assert not runtime.has_playbook("this-does-not-exist.yml")
    assert not runtime.has_playbook("this-does-not-exist.yml", basedir=Path())
    # this is part of community.molecule collection
    assert runtime.has_playbook("community.molecule.validate.yml")
1024 |
1025 |
def test_runtime_exception(monkeypatch: pytest.MonkeyPatch) -> None:
    """Asserts that we raise a runtime exception if unsupported environment variable is detected."""
    # ANSIBLE_COLLECTIONS_PATHS (plural) is deprecated and must be rejected.
    monkeypatch.setenv("ANSIBLE_COLLECTIONS_PATHS", "foo")
    expected = r"ANSIBLE_COLLECTIONS_PATHS was detected, replace it with ANSIBLE_COLLECTIONS_PATH to continue."
    with pytest.raises(RuntimeError, match=expected):
        Runtime()
1034 |
--------------------------------------------------------------------------------
/test/test_runtime_example.py:
--------------------------------------------------------------------------------
1 | """Sample use of Runtime class."""
2 |
3 | from ansible_compat.runtime import Runtime
4 |
5 |
def test_runtime_example() -> None:
    """Test basic functionality of Runtime class."""
    # Isolated mode keeps newly installed roles/collections from polluting
    # the default setup; max_retries makes flaky installs tolerable.
    runtime = Runtime(isolated=True, max_retries=3)

    # Ansible core version (a Version object, e.g. 2.9.10) and config access.
    _ = runtime.version
    _ = runtime.config.collections_path

    # Detect whether the current project is a collection and install its
    # requirements (will retry up to 3 times if needed).
    runtime.prepare_environment(install_local=True)

    # Install a specific collection tarball (also retried on failure).
    runtime.install_collection("examples/reqs_v2/community-molecule-0.1.0.tar.gz")

    # Run an arbitrary command through the runtime and check its outcome.
    outcome = runtime.run(["ansible-doc", "--list"])
    assert outcome.returncode == 0
26 |
--------------------------------------------------------------------------------
/test/test_runtime_scan_path.py:
--------------------------------------------------------------------------------
1 | """Test the scan path functionality of the runtime."""
2 |
3 | import json
4 | import os
5 | import subprocess
6 | import textwrap
7 | from pathlib import Path
8 |
9 | import pytest
10 | from _pytest.monkeypatch import MonkeyPatch
11 |
12 | from ansible_compat.runtime import Runtime
13 |
14 | from .conftest import VirtualEnvironment
15 |
# Metadata describing the prebuilt v2 collection tarball used as a fixture
# throughout this module.
V2_COLLECTION_TARBALL = Path("examples/reqs_v2/community-molecule-0.1.0.tar.gz")
V2_COLLECTION_NAMESPACE = "community"
V2_COLLECTION_NAME = "molecule"
V2_COLLECTION_VERSION = "0.1.0"
# Fully qualified collection name ("community.molecule").
V2_COLLECTION_FULL_NAME = f"{V2_COLLECTION_NAMESPACE}.{V2_COLLECTION_NAME}"
21 |
22 |
@pytest.mark.parametrize(
    ("scan", "raises_not_found"),
    (
        pytest.param(False, True, id="disabled"),
        pytest.param(True, False, id="enabled"),
    ),
    ids=str,
)
def test_scan_sys_path(
    venv_module: VirtualEnvironment,
    monkeypatch: MonkeyPatch,
    tmp_path: Path,
    scan: bool,
    raises_not_found: bool,
) -> None:
    """Confirm sys path is scanned for collections.

    Args:
        venv_module: Fixture for a virtual environment
        monkeypatch: Fixture for monkeypatching
        tmp_path: Fixture for a temporary directory
        scan: Whether to scan the sys path
        raises_not_found: Whether the collection is expected to be found
    """
    # Isolated the test from the others, so ansible will not find collections
    # that might be installed by other tests.
    monkeypatch.setenv("VIRTUAL_ENV", venv_module.project.as_posix())
    monkeypatch.setenv("ANSIBLE_HOME", tmp_path.as_posix())
    # Set the sys scan path environment variable
    monkeypatch.setenv("ANSIBLE_COLLECTIONS_SCAN_SYS_PATH", str(scan))
    # Set the ansible collections paths to avoid bleed from other tests
    monkeypatch.setenv("ANSIBLE_COLLECTIONS_PATH", str(tmp_path))

    runtime_tmp = Runtime(project_dir=tmp_path, isolated=True)
    first_site_package_dir = venv_module.site_package_dirs()[0]

    # Expected on-disk location once the collection lives in site-packages.
    installed_to = (
        first_site_package_dir
        / "ansible_collections"
        / V2_COLLECTION_NAMESPACE
        / V2_COLLECTION_NAME
    )
    if not installed_to.exists():
        # Install the collection into the venv site packages directory, force
        # as of yet this test is not isolated from the rest of the system
        runtime_tmp.install_collection(
            collection=V2_COLLECTION_TARBALL,
            destination=first_site_package_dir,
            force=True,
        )
    # Confirm the collection is installed
    assert installed_to.exists()

    # Run require_collection in a subprocess using the venv's interpreter so
    # the ANSIBLE_COLLECTIONS_SCAN_SYS_PATH setting takes effect there.
    script = textwrap.dedent(
        f"""
        import json;
        from ansible_compat.runtime import Runtime;
        r = Runtime();
        fv, cp = r.require_collection(name="{V2_COLLECTION_FULL_NAME}", version="{V2_COLLECTION_VERSION}", install=False);
        print(json.dumps({{"found_version": str(fv), "collection_path": str(cp)}}));
        """,
    )

    proc = venv_module.python_script_run(script)
    if raises_not_found:
        # With scanning disabled the collection in site-packages is invisible.
        assert proc.returncode != 0, (proc.stdout, proc.stderr)
        assert "InvalidPrerequisiteError" in proc.stderr
        assert "'community.molecule' not found" in proc.stderr
    else:
        assert proc.returncode == 0, (proc.stdout, proc.stderr)
        result = json.loads(proc.stdout)
        assert result["found_version"] == V2_COLLECTION_VERSION
        assert result["collection_path"] == str(installed_to)

    runtime_tmp.clean()
98 |
99 |
def test_ro_venv() -> None:
    """Tests behavior when the virtual environment is read-only.

    See Related https://github.com/ansible/ansible-compat/pull/470
    """
    tox_work_dir = os.environ.get("TOX_WORK_DIR", ".tox")
    venv_path = f"{tox_work_dir}/ro"
    # Build a venv, install the project into it, then make the whole tree
    # read-only before asking the runtime to install a collection.
    commands = [
        f"mkdir -p {venv_path}",
        f"chmod -R a+w {venv_path}",
        f"python -m venv --symlinks {venv_path}",
        f"{venv_path}/bin/python -m pip install -q -e .",
        f"chmod -R a-w {venv_path}",
        f"{venv_path}/bin/python -c \"from ansible_compat.runtime import Runtime; r = Runtime(); r.install_collection('ansible.posix:>=2.0.0')\"",
    ]
    for cmd in commands:
        outcome = subprocess.run(  # noqa: S602
            cmd,
            check=False,
            shell=True,
            text=True,
            capture_output=True,
        )
        failure_details = (
            f"Got {outcome.returncode} running {cmd}\n"
            f"\tstderr: {outcome.stderr}\n"
            f"\tstdout: {outcome.stdout}"
        )
        assert outcome.returncode == 0, failure_details
126 |
--------------------------------------------------------------------------------
/test/test_schema.py:
--------------------------------------------------------------------------------
1 | """Tests for schema utilities."""
2 |
3 | from __future__ import annotations
4 |
5 | import json
6 | from pathlib import Path
7 | from typing import TYPE_CHECKING, Any
8 |
9 | import pytest
10 |
11 | from ansible_compat.schema import JsonSchemaError, json_path, validate
12 |
13 | if TYPE_CHECKING:
14 | from ansible_compat.types import JSON
15 |
# Errors expected when validating validate0_data.json against
# validate0_schema.json, listed in the sorted order `validate` produces.
# NOTE(review): not referenced elsewhere in this module — confirm it is still
# imported by other tests before removing.
expected_results = [
    JsonSchemaError(
        message="False is not of type 'string'",
        data_path="environment.a",
        json_path="$.environment.a",
        schema_path="properties.environment.additionalProperties.type",
        relative_schema='{"type": "string"}',
        expected="string",
        validator="type",
        found="False",
    ),
    JsonSchemaError(
        message="True is not of type 'string'",
        data_path="environment.b",
        json_path="$.environment.b",
        schema_path="properties.environment.additionalProperties.type",
        relative_schema='{"type": "string"}',
        expected="string",
        validator="type",
        found="True",
    ),
]
38 |
39 |
def json_from_asset(file_name: str) -> JSON:
    """Load a json file from disk."""
    # Assets are resolved relative to this test module's directory.
    asset = Path(__file__).parent / file_name
    return json.loads(asset.read_text(encoding="utf-8"))  # type: ignore[no-any-return]
45 |
46 |
def jsonify(data: Any) -> JSON:  # noqa: ANN401
    """Convert object in JSON data structure."""
    # Round-trip through a serialized string; `default=vars` lets arbitrary
    # objects be represented by their attribute dict.
    serialized = json.dumps(data, default=vars, sort_keys=True)
    return json.loads(serialized)  # type: ignore[no-any-return]
50 |
51 |
@pytest.mark.parametrize("index", range(1))
def test_schema(index: int) -> None:
    """Test the schema validator."""
    schema = json_from_asset(f"assets/validate{index}_schema.json")
    data = json_from_asset(f"assets/validate{index}_data.json")
    expected = json_from_asset(f"assets/validate{index}_expected.json")

    # Validation must be deterministic: repeat it many times and require
    # identical, already-sorted output on every pass.
    for _ in range(1, 100):
        found_errors = validate(schema=schema, data=data)
        # The error class knows how to sort itself, so the returned list
        # must already be in sorted order.
        assert sorted(found_errors) == found_errors, "multiple errors not sorted"
        found_errors_json = jsonify(found_errors)
        assert (
            found_errors_json == expected
        ), f"inconsistent returns: {found_errors_json}"
70 |
71 |
def test_json_path() -> None:
    """Test json_path function."""
    # String parts become dotted keys, integer parts become [index] access.
    parts = ["a", 1, "b"]
    assert json_path(parts) == "$.a[1].b"
75 |
76 |
def test_validate_invalid_schema() -> None:
    """Test validate function error handling."""
    # A JSON array is not a valid schema; validate must report that cleanly.
    bad_schema = "[]"
    data = json_from_asset("assets/validate0_data.json")

    errors = validate(bad_schema, data)

    assert len(errors) == 1
    expected_message = "In 'schema sanity check': Invalid schema, must be a mapping."
    assert errors[0].to_friendly() == expected_message
88 |
--------------------------------------------------------------------------------
/test/test_types.py:
--------------------------------------------------------------------------------
1 | """Tests for types module."""
2 |
3 | import ansible_compat.types
4 |
5 |
def test_types() -> None:
    """Tests that JSON types are exported."""
    # Both public aliases must exist and be truthy.
    for alias in (ansible_compat.types.JSON, ansible_compat.types.JSON_ro):
        assert alias
10 |
--------------------------------------------------------------------------------
/test/test_version.py:
--------------------------------------------------------------------------------
1 | """Tests for _version module."""
2 |
3 |
def test_version_module() -> None:
    """Tests that _version exports are present."""
    # Imported inside the test so mypy/pylint can still run when the module
    # is not installed and the generated _version.py is missing.
    # pylint: disable=no-name-in-module,no-member
    import ansible_compat._version  # type: ignore[import-not-found,unused-ignore]

    for attr in ("__version__", "__version_tuple__", "version"):
        assert getattr(ansible_compat._version, attr)
14 |
--------------------------------------------------------------------------------
/tools/get-version.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Print the project version as computed by setuptools-scm, installing the
# tool on demand when it is not importable.
set -e
{
python3 -c "import setuptools_scm" >/dev/null || {
# Inside a virtualenv pip installs into the venv; otherwise fall back to a
# --user install to avoid touching system site-packages.
if [[ "$VIRTUAL_ENV" != "" ]]; then
PIPARGS=""
else
PIPARGS="--user"
fi
# PIPARGS is intentionally unquoted so an empty value expands to nothing.
python3 -m pip install $PIPARGS setuptools-scm
}
} 1>&2 # redirect stdout to stderr to avoid polluting the output
# Strip the "Guessed Version" prefix and any local (+...) suffix, leaving
# only the plain version number on stdout.
python3 -m setuptools_scm | \
sed 's/Guessed Version\([^+]\+\).*/\1/'
15 |
--------------------------------------------------------------------------------
/tools/report-coverage:
--------------------------------------------------------------------------------
#!/bin/bash
# Combine per-process coverage data for the current tox env, then emit the
# XML, LCOV and terminal reports consumed by codecov.io and developers.
set -euo pipefail
# Merge parallel-mode .coverage.* fragments into one data file.
coverage combine -q "--data-file=${TOX_ENV_DIR}/.coverage" "${TOX_ENV_DIR}"/.coverage.*
# --fail-under=0 because thresholds are enforced elsewhere, not here.
coverage xml "--data-file=${TOX_ENV_DIR}/.coverage" -o "${TOX_ENV_DIR}/coverage.xml" --ignore-errors --fail-under=0
COVERAGE_FILE="${TOX_ENV_DIR}/.coverage" coverage lcov --fail-under=0 --ignore-errors -q
COVERAGE_FILE="${TOX_ENV_DIR}/.coverage" coverage report --fail-under=0 --ignore-errors
7 |
--------------------------------------------------------------------------------
/tools/smoke.py:
--------------------------------------------------------------------------------
#!python3
"""Runs downstream projects tests with current code from compat injected in them."""

import hashlib
import logging
import os
import tempfile
from pathlib import Path
from subprocess import run  # noqa: S404

logging.basicConfig(
    level=logging.DEBUG,
    format="%(levelname)s: %(message)s",
)
logger = logging.getLogger()


# Derive a short, stable checksum from this checkout's path so each checkout
# gets its own reusable temp directory for the downstream clones.
parent_project_dir = Path(__file__).parent.parent.resolve().as_posix()
checksum = hashlib.sha256(parent_project_dir.encode("utf-8")).hexdigest()[:4]
tmp_path = Path(tempfile.gettempdir()) / f"ansible-compat-smoke-{checksum}"

logger.info("Using %s temporary directory...", tmp_path)

for project in ("molecule", "ansible-lint"):

    logger.info("Running tests for %s", project)
    project_dir = tmp_path / project
    # Reuse an existing clone when present; otherwise clone fresh.
    if (project_dir / ".git").exists():
        run(["git", "-C", project_dir, "pull"], check=True)
    else:
        project_dir.mkdir(parents=True, exist_ok=True)
        run(
            [
                "git",
                "clone",
                "--recursive",
                f"https://github.com/ansible/{project}",
                project_dir,
            ],
            check=True,
        )

    os.chdir(project_dir)
    venv_dir = (project_dir / ".venv").as_posix()
    # Point uv at the project venv so subsequent uv commands target it.
    os.environ["VIRTUAL_ENV"] = venv_dir
    run(
        ["uv", "venv", "--seed", venv_dir],
        check=True,
    )  # creates .venv (implicit for next commands)
    # Install the local ansible-compat checkout (editable) alongside the
    # downstream project, then run its test suite.
    run(
        ["uv", "pip", "install", "-e", f"{parent_project_dir}[test]", "-e", ".[test]"],
        check=True,
    )
    run(["uv", "pip", "freeze"], check=True)
    run(["uv", "run", "pytest", "-v", "-n", "auto"], check=True)
56 |
--------------------------------------------------------------------------------
/tools/update-spec.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Render .config/python3-ansible-compat.spec into dist/ with the current
# version substituted and the %changelog section regenerated from git log.
set -e
DIR=$(dirname "$0")
VERSION=$(./tools/get-version.sh)
SPEC="${DIR}/../dist/python3-ansible-compat.spec"
mkdir -p "${DIR}/../dist"
sed -e "s/VERSION_PLACEHOLDER/${VERSION}/" \
  "${DIR}/../.config/python3-ansible-compat.spec" \
  > "${SPEC}"

export LC_ALL=en_US.UTF-8
CHANGELOG=$(git log -n 20 --pretty="* %ad %an %ae \n- %s\n" --date=format:"%a %b %d %Y")
# Find the %changelog marker and delete everything after it so a fresh
# changelog can be appended.
NUM=$(grep -n "%changelog" "${SPEC}" | awk -F':' '{print $1}')
NUM_START=$((NUM + 1))
# Bug fix: the spec filename used to be misspelled ("pytho3n-..."), which made
# NUM_END empty and the sed range below invalid. Use wc -l on the real file.
NUM_END=$(wc -l < "${SPEC}")
sed -i "${NUM_START},${NUM_END}d" "${SPEC}"
echo -e "$CHANGELOG" >> "${SPEC}"
16 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist =
3 | lint
4 | pkg
5 | docs
6 | py
7 | py-devel
8 | py310-ansible216
9 | py310-ansible217
10 | py311-ansible216
11 | py311-ansible217
12 | py312-ansible216
13 | py312-ansible217
14 | py312-ansible218
15 | py313-ansible218
16 |
17 | isolated_build = true
18 | skip_missing_interpreters = True
19 | requires =
20 | tox >= 4.24.1
21 | tox-uv >= 1.20.1
22 | setuptools >= 65.3.0 # editable installs
23 |
24 | [testenv]
25 | description =
26 | Run the tests
27 | devel: ansible devel branch
28 | ansible216: ansible-core 2.16
29 | ansible217: ansible-core 2.17
30 | ansible218: ansible-core 2.18
31 |
32 | deps =
33 | ansible216: ansible-core>=2.16,<2.17
34 | ansible217: ansible-core>=2.17,<2.18
35 | ansible218: ansible-core>=2.18,<2.19
36 |
37 | devel: ansible-core @ git+https://github.com/ansible/ansible.git@devel # GPLv3+
38 | # avoid installing ansible-core on -devel envs:
39 | !devel: ansible-core
40 | extras =
41 | test
42 |
43 |
44 | commands_pre =
# safety measure to ensure we do not accidentally run tests with broken dependencies
46 | !{docs}: {envpython} -m pip check
47 | # cleaning needed to prevent errors between runs
48 | sh -c "rm -f {envdir}/.coverage.* 2>/dev/null || true"
49 | commands =
50 | sh -c "ansible --version | head -n 1"
51 | # We add coverage options but not making them mandatory as we do not want to force
52 | # pytest users to run coverage when they just want to run a single test with `pytest -k test`
53 | coverage run -m pytest {posargs:--junitxml=./junit.xml}
54 | commands_post =
55 | # needed for upload to codecov.io
56 | {py,py310,py311,py312,py313}: ./tools/report-coverage
57 | passenv =
58 | CURL_CA_BUNDLE # https proxies, https://github.com/tox-dev/tox/issues/1437
59 | FORCE_COLOR
60 | HOME
61 | NO_COLOR
62 | PYTEST_* # allows developer to define their own preferences
63 | PYTEST_REQPASS # needed for CI
64 | PYTHON* # PYTHONPYCACHEPREFIX, PYTHONIOENCODING, PYTHONBREAKPOINT,...
65 | PY_COLORS
66 | RTD_TOKEN
67 | REQUESTS_CA_BUNDLE # https proxies
68 | SETUPTOOLS_SCM_DEBUG
69 | SSL_CERT_FILE # https proxies
70 | SSH_AUTH_SOCK # may be needed by git
71 | LANG
72 | LC_*
73 | setenv =
74 | ANSIBLE_HOME = {envdir}/.ansible
75 | ANSIBLE_DEVEL_WARNING='false'
76 | COVERAGE_FILE = {env:COVERAGE_FILE:{envdir}/.coverage.{envname}}
77 | COVERAGE_PROCESS_START={toxinidir}/pyproject.toml
78 | PIP_DISABLE_PIP_VERSION_CHECK = 1
79 | PIP_CONSTRAINT = {toxinidir}/.config/constraints.txt
80 | UV_CONSTRAINT = {toxinidir}/.config/constraints.txt
81 | PRE_COMMIT_COLOR = always
82 | FORCE_COLOR = 1
83 | allowlist_externals =
84 | ansible
85 | git
86 | sh
87 | ./tools/report-coverage
88 | # https://tox.wiki/en/latest/upgrading.html#editable-mode
89 | package = editable
90 | uv_seed = true
91 |
92 | [testenv:lint]
93 | description = Run all linters
# locked basepython is needed to keep constraints.txt predictable
95 | basepython = python3.10
96 | deps =
97 | pip
98 | pre-commit>=4.0.1
99 | pre-commit-uv>=1.15.0
100 | skip_install = true
101 | usedevelop = false
102 | commands =
103 | pre-commit run -a --show-diff-on-failure {posargs:}
104 | pre-commit run -a pip-compile
105 | passenv =
106 | {[testenv]passenv}
107 | PRE_COMMIT_HOME
108 | setenv =
109 | {[testenv]setenv}
110 | PIP_CONSTRAINT = /dev/null
111 | UV_CONSTRAINT = /dev/null
112 |
113 | [testenv:deps]
114 | description = Bump all test dependencies
115 | basepython = {[testenv:lint]basepython}
116 | envdir = {toxworkdir}/lint
117 | deps = {[testenv:lint]deps}
118 | skip_install = true
119 | commands =
120 | pre-commit run -a --hook-stage manual pip-compile-upgrade
121 | {[testenv:lint]commands}
122 | setenv =
123 | {[testenv]setenv}
124 | PIP_CONSTRAINT = /dev/null
125 | UV_CONSTRAINT = /dev/null
126 |
127 | [testenv:pkg]
128 | description =
129 | Build package, verify metadata, install package and assert behavior when ansible is missing.
130 | deps =
131 | build >= 0.9.0
132 | pip
133 | twine >= 4.0.1
134 | skip_install = true
135 | # Ref: https://twitter.com/di_codes/status/1044358639081975813
136 | commands =
137 | # build wheel and sdist using PEP-517
138 | {envpython} -c 'import os.path, shutil, sys; \
139 | dist_dir = os.path.join("{toxinidir}", "dist"); \
140 | os.path.isdir(dist_dir) or sys.exit(0); \
141 | print("Removing \{!s\} contents...".format(dist_dir), file=sys.stderr); \
142 | shutil.rmtree(dist_dir)'
143 | {envpython} -m build \
144 | --outdir {toxinidir}/dist/ \
145 | {toxinidir}
146 | # Validate metadata using twine
147 | twine check --strict {toxinidir}/dist/*
148 | # Install the wheel
149 | sh -c "python3 -m pip install {toxinidir}/dist/*.whl"
150 | pip uninstall -y ansible-compat
151 |
152 | [testenv:py]
153 | description = Run the tests with {basepython} ansible-core 2.16+
154 | deps =
155 | {[testenv]deps}
156 | ansible-core>=2.16
157 |
158 | [testenv:rpm]
159 | description = Use packit to build RPM (requires RPM based Linux distro)
160 | deps =
161 | packitos
162 | commands =
163 | sh -c "packit build in-mock --root=fedora-40-$(arch)"
164 |
165 | [testenv:docs]
166 | description = Build docs
167 | commands =
168 | mkdocs {posargs:build --strict --site-dir=_readthedocs/html/}
169 | setenv =
170 | # https://squidfunk.github.io/mkdocs-material/plugins/requirements/image-processing/#troubleshooting
171 | DYLD_FALLBACK_LIBRARY_PATH = /opt/homebrew/lib:{env:LD_LIBRARY_PATH}
172 | extras = docs
173 | passenv = *
174 |
175 | [testenv:smoke]
176 | description = Run ansible-lint and molecule own testing with current code from compat library
177 | commands =
178 | python3 tools/smoke.py
179 | del_env =
180 | PIP_CONSTRAINT
181 | UV_CONSTRAINT
182 | editable = true
183 | skip_install = true
184 |
--------------------------------------------------------------------------------